From b5fe4c75944deea540b807021e0f88401878118f Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:22:48 +0100 Subject: [PATCH 0001/1112] Fix placement of allowedApiCalls in example manifests allowedApiCalls should be at the top level, not inside toolParameters. --- .../external-tools/dynamicDatasetTool.json | 20 +++++++++---------- .../root/external-tools/fabulousFileTool.json | 18 ++++++++--------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json index 47413c8a625..22dd6477cb4 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json @@ -14,14 +14,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDatasetJson", - "httpMethod":"GET", - "urlTemplate":"/api/v1/datasets/{datasetId}", - "timeOut":10 - } - ] - } + ] + }, + "allowedApiCalls": [ + { + "name":"retrieveDatasetJson", + "httpMethod":"GET", + "urlTemplate":"/api/v1/datasets/{datasetId}", + "timeOut":10 + } + ] } diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json index 1c132576099..2b6a0b8e092 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json @@ -21,14 +21,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDataFile", - "httpMethod":"GET", - "urlTemplate":"/api/v1/access/datafile/{fileId}", - "timeOut":270 - } ] - } + }, + "allowedApiCalls": [ + { + "name":"retrieveDataFile", + "httpMethod":"GET", + "urlTemplate":"/api/v1/access/datafile/{fileId}", + "timeOut":270 + } + ] } From d76092c1ec57a835920b8fd10e6883299f8b6d3a Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:24:41 +0100 Subject: [PATCH 0002/1112] Add missing break to DATASET case Without this it also evaluates the FILE case causing NPE when dataFile is accessed. 
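For reference, the fall-through behaviour this patch guards against can be reproduced with a minimal, self-contained Java sketch. The class, enum, and field names below are illustrative only, not the actual Dataverse code: without a break, the DATASET branch keeps executing into the FILE branch, which dereferences a null dataFile.

    // Minimal illustration of the fall-through bug (hypothetical names, not Dataverse code).
    public class FallThroughDemo {
        enum Type { DATASET, FILE }

        static String buildCallback(Type type, Long datasetId, Object dataFile) {
            String callback = null;
            switch (type) {
                case DATASET:
                    callback = "/api/v1/datasets/" + datasetId + "/toolparams";
                    // no "break" here: execution continues into the FILE case
                case FILE:
                    // dataFile is null for a DATASET request, so this throws NullPointerException
                    callback = "/api/v1/files/" + dataFile.toString() + "/toolparams";
                    break;
            }
            return callback;
        }

        public static void main(String[] args) {
            // Throws NullPointerException without the break that this patch adds.
            System.out.println(buildCallback(Type.DATASET, 42L, null));
        }
    }

Adding the break at the end of the DATASET case, as the diff below does, stops the fall-through so the FILE branch is only reached for file requests.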
--- .../harvard/iq/dataverse/externaltools/ExternalToolHandler.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index 88a51017b75..dac046373ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -111,6 +111,7 @@ public String handleRequest(boolean preview) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" + dataFile.getId() + "/metadata/" + fileMetadata.getId() + "/toolparams/" From ecac37fbd64c83bfc8d045ae3204ab86dc7bc29d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 2 May 2023 10:52:13 -0400 Subject: [PATCH 0003/1112] initial Globus Store class with some quick test code --- pom.xml | 7 +- .../dataaccess/GlobusOverlayAccessIO.java | 655 ++++++++++++++++++ .../dataaccess/RemoteOverlayAccessIO.java | 34 +- .../iq/dataverse/settings/JvmSettings.java | 2 + 4 files changed, 680 insertions(+), 18 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java diff --git a/pom.xml b/pom.xml index 5f514819947..e5b191f0ed7 100644 --- a/pom.xml +++ b/pom.xml @@ -167,8 +167,13 @@ org.eclipse.microprofile.config microprofile-config-api - provided + + + org.apache.geronimo.config + geronimo-config-impl + 1.0 + jakarta.platform jakarta.jakartaee-api diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java new file mode 100644 index 00000000000..fe62e25ad6f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -0,0 +1,655 @@ +package edu.harvard.iq.dataverse.dataaccess; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.channels.Channel; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.Path; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.List; +import java.util.function.Predicate; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.apache.commons.lang3.NotImplementedException; +import org.apache.http.client.config.CookieSpecs; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.protocol.HttpClientContext; +import 
org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustAllStrategy; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.util.EntityUtils; + +import javax.net.ssl.SSLContext; + +/** + * @author qqmyers + * @param what it stores + */ +/* + * Globus Overlay Driver + * + * StorageIdentifier format: + * :///// + */ +public class GlobusOverlayAccessIO extends StorageIO { + + private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO"); + + private StorageIO baseStore = null; + private String path = null; + private String endpointWithBasePath = null; + + private static HttpClientContext localContext = HttpClientContext.create(); + private PoolingHttpClientConnectionManager cm = null; + CloseableHttpClient httpclient = null; + private int timeout = 1200; + private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) + .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000) + .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build(); + private static boolean trustCerts = false; + private int httpConcurrency = 4; + + public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { + super(dvObject, req, driverId); + this.setIsLocalFile(false); + configureStores(req, driverId, null); + logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); + path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + validatePath(path); + + logger.fine("Relative path: " + path); + } + + public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { + super(null, null, driverId); + this.setIsLocalFile(false); + configureStores(null, driverId, storageLocation); + + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Relative path: " + path); + } + + private void validatePath(String relPath) throws IOException { + try { + URI absoluteURI = new URI(endpointWithBasePath + "/" + relPath); + if(!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { + throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s endpoint/basePath"); + } + } catch(URISyntaxException use) { + throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId); + } + } + + + @Override + public void open(DataAccessOption... 
options) throws IOException { + + baseStore.open(options); + + DataAccessRequest req = this.getRequest(); + + if (isWriteAccessRequested(options)) { + isWriteAccess = true; + isReadAccess = false; + } else { + isWriteAccess = false; + isReadAccess = true; + } + + if (dvObject instanceof DataFile) { + String storageIdentifier = dvObject.getStorageIdentifier(); + + DataFile dataFile = this.getDataFile(); + + if (req != null && req.getParameter("noVarHeader") != null) { + baseStore.setNoVarHeader(true); + } + + if (storageIdentifier == null || "".equals(storageIdentifier)) { + throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile."); + } + + // Fix new DataFiles: DataFiles that have not yet been saved may use this method + // when they don't have their storageidentifier in the final form + // So we fix it up here. ToDo: refactor so that storageidentifier is generated + // by the appropriate StorageIO class and is final from the start. + logger.fine("StorageIdentifier is: " + storageIdentifier); + + if (isReadAccess) { + if (dataFile.getFilesize() >= 0) { + this.setSize(dataFile.getFilesize()); + } else { + logger.fine("Setting size"); + this.setSize(getSizeFromGlobus()); + } + if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + + List datavariables = dataFile.getDataTable().getDataVariables(); + String varHeaderLine = generateVariableHeader(datavariables); + this.setVarHeader(varHeaderLine); + } + + } + + this.setMimeType(dataFile.getContentType()); + + try { + this.setFileName(dataFile.getFileMetadata().getLabel()); + } catch (Exception ex) { + this.setFileName("unknown"); + } + } else if (dvObject instanceof Dataset) { + throw new IOException( + "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet"); + } else if (dvObject instanceof Dataverse) { + throw new IOException( + "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet"); + } else { + this.setSize(getSizeFromGlobus()); + } + } + + private long getSizeFromGlobus() { + throw new NotImplementedException(); + /* + long size = -1; + HttpHead head = new HttpHead(endpointWithBasePath + "/" + path); + try { + CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); + + try { + int code = response.getStatusLine().getStatusCode(); + logger.fine("Response for HEAD: " + code); + switch (code) { + case 200: + Header[] headers = response.getHeaders(HTTP.CONTENT_LEN); + logger.fine("Num headers: " + headers.length); + String sizeString = response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); + logger.fine("Content-Length: " + sizeString); + size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); + logger.fine("Found file size: " + size); + break; + default: + logger.warning("Response from " + head.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (IOException e) { + logger.warning(e.getMessage()); + } + return size; + */ + } + + @Override + public InputStream getInputStream() throws IOException { + if (super.getInputStream() == null) { + try { + HttpGet get = new HttpGet(generateTemporaryDownloadUrl(null, null, null)); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 
200: + setInputStream(response.getEntity().getContent()); + break; + default: + logger.warning("Response from " + get.getURI().toString() + " was " + code); + throw new IOException("Cannot retrieve: " + endpointWithBasePath + "/" + path + " code: " + code); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + e.printStackTrace(); + throw new IOException("Error retrieving: " + endpointWithBasePath + "/" + path + " " + e.getMessage()); + + } + setChannel(Channels.newChannel(super.getInputStream())); + } + return super.getInputStream(); + } + + @Override + public Channel getChannel() throws IOException { + if (super.getChannel() == null) { + getInputStream(); + } + return channel; + } + + @Override + public ReadableByteChannel getReadChannel() throws IOException { + // Make sure StorageIO.channel variable exists + getChannel(); + return super.getReadChannel(); + } + + @Override + public void delete() throws IOException { + // Delete is best-effort - we tell the remote server and it may or may not + // implement this call + if (!isDirectAccess()) { + throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); + } + try { + HttpDelete del = new HttpDelete(endpointWithBasePath + "/" + path); + CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); + try { + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + logger.fine("Sent DELETE for " + endpointWithBasePath + "/" + path); + default: + logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + throw new IOException("Error deleting: " + endpointWithBasePath + "/" + path); + + } + + // Delete all the cached aux files as well: + deleteAllAuxObjects(); + + } + + @Override + public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException { + return baseStore.openAuxChannel(auxItemTag, options); + } + + @Override + public boolean isAuxObjectCached(String auxItemTag) throws IOException { + return baseStore.isAuxObjectCached(auxItemTag); + } + + @Override + public long getAuxObjectSize(String auxItemTag) throws IOException { + return baseStore.getAuxObjectSize(auxItemTag); + } + + @Override + public Path getAuxObjectAsPath(String auxItemTag) throws IOException { + return baseStore.getAuxObjectAsPath(auxItemTag); + } + + @Override + public void backupAsAux(String auxItemTag) throws IOException { + baseStore.backupAsAux(auxItemTag); + } + + @Override + public void revertBackupAsAux(String auxItemTag) throws IOException { + baseStore.revertBackupAsAux(auxItemTag); + } + + @Override + // this method copies a local filesystem Path into this DataAccess Auxiliary + // location: + public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException { + baseStore.savePathAsAux(fileSystemPath, auxItemTag); + } + + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize); + } + + /** + * @param inputStream InputStream we want to save + * @param auxItemTag String representing this Auxiliary type ("extension") + * @throws IOException if anything goes wrong. 
+ */ + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag); + } + + @Override + public List listAuxObjects() throws IOException { + return baseStore.listAuxObjects(); + } + + @Override + public void deleteAuxObject(String auxItemTag) throws IOException { + baseStore.deleteAuxObject(auxItemTag); + } + + @Override + public void deleteAllAuxObjects() throws IOException { + baseStore.deleteAllAuxObjects(); + } + + @Override + public String getStorageLocation() throws IOException { + String fullStorageLocation = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStorageLocation); + int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); + if(driverIndex >=0) { + fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + if (this.getDvObject() instanceof Dataset) { + throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); + } else if (this.getDvObject() instanceof DataFile) { + fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation; + } else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStorageLocation: " + fullStorageLocation); + return fullStorageLocation; + } + + @Override + public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: this is a remote DataAccess IO object, it has no local filesystem path associated with it."); + } + + @Override + public boolean exists() { + logger.fine("Exists called"); + return (getSizeFromGlobus() != -1); + } + + @Override + public WritableByteChannel getWriteChannel() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: there are no write Channels associated with S3 objects."); + } + + @Override + public OutputStream getOutputStream() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: there are no output Streams associated with S3 objects."); + } + + @Override + public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { + return baseStore.getAuxFileAsInputStream(auxItemTag); + } + + @Override + public boolean downloadRedirectEnabled() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect"); + if ("true".equalsIgnoreCase(optionValue)) { + return true; + } + return false; + } + + public boolean downloadRedirectEnabled(String auxObjectTag) { + return baseStore.downloadRedirectEnabled(auxObjectTag); + } + + @Override + public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) + throws IOException { + + // ToDo - support remote auxiliary Files + if (auxiliaryTag == null) { + String secretKey = System.getProperty("dataverse.files." 
+ this.driverId + ".secret-key"); + if (secretKey == null) { + return endpointWithBasePath + "/" + path; + } else { + return UrlSignerUtil.signUrl(endpointWithBasePath + "/" + path, getUrlExpirationMinutes(), null, "GET", + secretKey); + } + } else { + return baseStore.generateTemporaryDownloadUrl(auxiliaryTag, auxiliaryType, auxiliaryFileName); + } + } + + int getUrlExpirationMinutes() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".url-expiration-minutes"); + if (optionValue != null) { + Integer num; + try { + num = Integer.parseInt(optionValue); + } catch (NumberFormatException ex) { + num = null; + } + if (num != null) { + return num; + } + } + return 60; + } + + private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId); + logger.info("base-uri is " + endpointWithBasePath); + if (endpointWithBasePath == null) { + throw new IOException("dataverse.files." + this.driverId + ".base-uri is required"); + } else { + try { + new URI(endpointWithBasePath); + } catch (Exception e) { + logger.warning( + "Trouble interpreting base-url for store: " + this.driverId + " : " + e.getLocalizedMessage()); + throw new IOException("Can't interpret base-url as a URI"); + } + + } + + if (baseStore == null) { + String baseDriverId = getBaseStoreIdFor(driverId); + String fullStorageLocation = null; + String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if(dvObject instanceof Dataset) { + baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); + } else { + if (this.getDvObject() != null) { + fullStorageLocation = getStoragePath(); + + // S3 expects :/// + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: RemoteOverlay store with base store type: " + + System.getProperty("dataverse.files." + baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + + } else if (storageLocation != null) { + // ://// + //remoteDriverId:// is removed if coming through directStorageIO + int index = storageLocation.indexOf(DataAccess.SEPARATOR); + if(index > 0) { + storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); + } + //THe base store needs the baseStoreIdentifier and not the relative URL + fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); + + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: RemoteOverlay store with base store type: " + + System.getProperty("dataverse.files." 
+ baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + } + baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); + } + if (baseDriverType.contentEquals(DataAccess.S3)) { + ((S3AccessIO) baseStore).setMainDriver(false); + } + } + remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); + try { + remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); + } catch(MalformedURLException mfue) { + logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); + } + } + + //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required. + private String getStoragePath() throws IOException { + String fullStoragePath = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStoragePath); + int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); + if(driverIndex >=0) { + fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + int suffixIndex = fullStoragePath.indexOf("//"); + if(suffixIndex >=0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); + } + if (this.getDvObject() instanceof Dataset) { + fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" + + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (this.getDvObject() instanceof DataFile) { + fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + }else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStoragePath: " + fullStoragePath); + return fullStoragePath; + } + + public CloseableHttpClient getSharedHttpClient() { + if (httpclient == null) { + try { + initHttpPool(); + httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build(); + + } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) { + logger.warning(ex.getMessage()); + } + } + return httpclient; + } + + private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { + if (trustCerts) { + // use the TrustSelfSignedStrategy to allow Self Signed Certificates + SSLContext sslContext; + SSLConnectionSocketFactory connectionFactory; + + sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build(); + // create an SSL Socket Factory to use the SSLContext with the trust self signed + // certificate strategy + // and allow all hosts verifier. + connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); + + Registry registry = RegistryBuilder.create() + .register("https", connectionFactory).build(); + cm = new PoolingHttpClientConnectionManager(registry); + } else { + cm = new PoolingHttpClientConnectionManager(); + } + cm.setDefaultMaxPerRoute(httpConcurrency); + cm.setMaxTotal(httpConcurrency > 20 ? 
httpConcurrency : 20); + } + + @Override + public void savePath(Path fileSystemPath) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: savePath() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: saveInputStream() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream, Long filesize) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: saveInputStream(InputStream, Long) not implemented in this storage driver."); + + } + + protected static boolean isValidIdentifier(String driverId, String storageId) { + String urlPath = storageId.substring(storageId.lastIndexOf("//") + 2); + String baseUri = System.getProperty("dataverse.files." + driverId + ".base-uri"); + try { + URI absoluteURI = new URI(baseUri + "/" + urlPath); + if(!absoluteURI.normalize().toString().startsWith(baseUri)) { + logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); + return false; + } + } catch(URISyntaxException use) { + logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId); + logger.warning(use.getLocalizedMessage()); + return false; + } + return true; + } + + public static String getBaseStoreIdFor(String driverId) { + return System.getProperty("dataverse.files." + driverId + ".base-store"); + } + + @Override + public List cleanUp(Predicate filter, boolean dryRun) throws IOException { + return baseStore.cleanUp(filter, dryRun); + } + + public static void main(String[] args) { + System.out.println("Running the main method"); + if (args.length > 0) { + System.out.printf("List of arguments: {}", Arrays.toString(args)); + } + System.setProperty("dataverse.files.globus.base-uri", "12345/top"); + System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + logger.info(JvmSettings.BASE_URI.lookup("globus")); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index 66c6a4cc2ee..ee2b6779cba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -65,7 +65,7 @@ public class RemoteOverlayAccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); private StorageIO baseStore = null; - private String urlPath = null; + private String path = null; private String baseUrl = null; private static HttpClientContext localContext = HttpClientContext.create(); @@ -83,10 +83,10 @@ public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) this.setIsLocalFile(false); configureStores(req, driverId, null); logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); - urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); - validatePath(urlPath); + path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + validatePath(path); - logger.fine("Base URL: " + urlPath); + logger.fine("Base URL: " + path); } public 
RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOException { @@ -94,14 +94,14 @@ public RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOE this.setIsLocalFile(false); configureStores(null, driverId, storageLocation); - urlPath = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); - validatePath(urlPath); - logger.fine("Base URL: " + urlPath); + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Base URL: " + path); } - private void validatePath(String path) throws IOException { + private void validatePath(String relPath) throws IOException { try { - URI absoluteURI = new URI(baseUrl + "/" + urlPath); + URI absoluteURI = new URI(baseUrl + "/" + relPath); if(!absoluteURI.normalize().toString().startsWith(baseUrl)) { throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s base-url"); } @@ -182,7 +182,7 @@ public void open(DataAccessOption... options) throws IOException { private long getSizeFromHttpHeader() { long size = -1; - HttpHead head = new HttpHead(baseUrl + "/" + urlPath); + HttpHead head = new HttpHead(baseUrl + "/" + path); try { CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); @@ -224,12 +224,12 @@ public InputStream getInputStream() throws IOException { break; default: logger.warning("Response from " + get.getURI().toString() + " was " + code); - throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath + " code: " + code); + throw new IOException("Cannot retrieve: " + baseUrl + "/" + path + " code: " + code); } } catch (Exception e) { logger.warning(e.getMessage()); e.printStackTrace(); - throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath + " " + e.getMessage()); + throw new IOException("Error retrieving: " + baseUrl + "/" + path + " " + e.getMessage()); } setChannel(Channels.newChannel(super.getInputStream())); @@ -260,13 +260,13 @@ public void delete() throws IOException { throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); } try { - HttpDelete del = new HttpDelete(baseUrl + "/" + urlPath); + HttpDelete del = new HttpDelete(baseUrl + "/" + path); CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); try { int code = response.getStatusLine().getStatusCode(); switch (code) { case 200: - logger.fine("Sent DELETE for " + baseUrl + "/" + urlPath); + logger.fine("Sent DELETE for " + baseUrl + "/" + path); default: logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); } @@ -275,7 +275,7 @@ public void delete() throws IOException { } } catch (Exception e) { logger.warning(e.getMessage()); - throw new IOException("Error deleting: " + baseUrl + "/" + urlPath); + throw new IOException("Error deleting: " + baseUrl + "/" + path); } @@ -420,9 +420,9 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary if (auxiliaryTag == null) { String secretKey = System.getProperty("dataverse.files." 
+ this.driverId + ".secret-key"); if (secretKey == null) { - return baseUrl + "/" + urlPath; + return baseUrl + "/" + path; } else { - return UrlSignerUtil.signUrl(baseUrl + "/" + urlPath, getUrlExpirationMinutes(), null, "GET", + return UrlSignerUtil.signUrl(baseUrl + "/" + path, getUrlExpirationMinutes(), null, "GET", secretKey); } } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 86130f5146e..4fb895f5adc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -47,6 +47,8 @@ public enum JvmSettings { // FILES SETTINGS SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), + FILES(SCOPE_FILES), + BASE_URI(FILES, "base-uri"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From 2c4c927cc8f20d53ee1aaaf1979b793ee53f9b3f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 5 May 2023 14:13:02 -0400 Subject: [PATCH 0004/1112] add token --- .../dataaccess/GlobusOverlayAccessIO.java | 171 +++++++++++------- .../iq/dataverse/settings/JvmSettings.java | 1 + 2 files changed, 111 insertions(+), 61 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index fe62e25ad6f..050b9ddc176 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.io.FileNotFoundException; import java.io.IOException; @@ -31,6 +32,7 @@ import java.util.logging.Logger; import org.apache.commons.lang3.NotImplementedException; +import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -49,6 +51,7 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; +import javax.json.JsonObject; import javax.net.ssl.SSLContext; /** @@ -58,8 +61,8 @@ /* * Globus Overlay Driver * - * StorageIdentifier format: - * :///// + * StorageIdentifier format: :///// */ public class GlobusOverlayAccessIO extends StorageIO { @@ -68,6 +71,7 @@ public class GlobusOverlayAccessIO extends StorageIO { private StorageIO baseStore = null; private String path = null; private String endpointWithBasePath = null; + private String globusToken = null; private static HttpClientContext localContext = HttpClientContext.create(); private PoolingHttpClientConnectionManager cm = null; @@ -86,7 +90,7 @@ public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); validatePath(path); - + logger.fine("Relative path: " + path); } @@ -99,18 +103,17 @@ public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOE validatePath(path); logger.fine("Relative path: " + path); } - + private void validatePath(String relPath) throws IOException { try { URI absoluteURI = new 
URI(endpointWithBasePath + "/" + relPath); - if(!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { + if (!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s endpoint/basePath"); } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId); } - } - + } @Override public void open(DataAccessOption... options) throws IOException { @@ -181,37 +184,64 @@ public void open(DataAccessOption... options) throws IOException { } } + // Call the Globus API to get the file size private long getSizeFromGlobus() { - throw new NotImplementedException(); - /* - long size = -1; - HttpHead head = new HttpHead(endpointWithBasePath + "/" + path); + // Construct Globus URL + URI absoluteURI = null; try { - CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); - - try { - int code = response.getStatusLine().getStatusCode(); - logger.fine("Response for HEAD: " + code); - switch (code) { - case 200: - Header[] headers = response.getHeaders(HTTP.CONTENT_LEN); - logger.fine("Num headers: " + headers.length); - String sizeString = response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); - logger.fine("Content-Length: " + sizeString); - size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); - logger.fine("Found file size: " + size); - break; - default: - logger.warning("Response from " + head.getURI().toString() + " was " + code); - } - } finally { - EntityUtils.consume(response.getEntity()); + int filenameStart = path.lastIndexOf("/") + 1; + int pathStart = endpointWithBasePath.indexOf("/") + 1; + + String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + + path.substring(0, filenameStart); + String filename = path.substring(filenameStart); + String endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath; + + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename); + HttpGet get = new HttpGet(absoluteURI); + String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId); + logger.info("Token is " + token); + get.addHeader("Authorization", "Bearer " + token); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + if (response.getStatusLine().getStatusCode() == 200) { + //Get reponse as string + String responseString = EntityUtils.toString(response.getEntity()); + logger.fine("Response from " + get.getURI().toString() + " is: " + responseString); + JsonObject responseJson = JsonUtil.getJsonObject(responseString); + return (long) responseJson.getInt("size"); + } else { + logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); + logger.info(EntityUtils.toString(response.getEntity())); } + } catch (URISyntaxException e) { + // Should have been caught in validatePath + e.printStackTrace(); + } catch (ClientProtocolException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } catch (IOException e) { - logger.warning(e.getMessage()); + // TODO Auto-generated catch block + e.printStackTrace(); } - return size; - */ + return -1; + + /* + * long size = -1; HttpHead head = new HttpHead(endpointWithBasePath + "/" + + * path); try { CloseableHttpResponse response = + * getSharedHttpClient().execute(head, localContext); + * + * try { int code = response.getStatusLine().getStatusCode(); + * logger.fine("Response for HEAD: " + code); switch (code) { case 200: Header[] + * headers = response.getHeaders(HTTP.CONTENT_LEN); logger.fine("Num headers: " + * + headers.length); String sizeString = + * response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); + * logger.fine("Content-Length: " + sizeString); size = + * Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); + * logger.fine("Found file size: " + size); break; default: + * logger.warning("Response from " + head.getURI().toString() + " was " + code); + * } } finally { EntityUtils.consume(response.getEntity()); } } catch + * (IOException e) { logger.warning(e.getMessage()); } return size; + */ } @Override @@ -360,8 +390,9 @@ public String getStorageLocation() throws IOException { String fullStorageLocation = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStorageLocation); int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStorageLocation = fullStorageLocation + .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } if (this.getDvObject() instanceof Dataset) { throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); @@ -411,7 +442,7 @@ public boolean downloadRedirectEnabled() { } return false; } - + public boolean downloadRedirectEnabled(String auxObjectTag) { return baseStore.downloadRedirectEnabled(auxObjectTag); } @@ -469,9 +500,10 @@ private void configureStores(DataAccessRequest req, String driverId, String stor if (baseStore == null) { String baseDriverId = getBaseStoreIdFor(driverId); String fullStorageLocation = null; - String baseDriverType = 
System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - - if(dvObject instanceof Dataset) { + String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if (dvObject instanceof Dataset) { baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); } else { if (this.getDvObject() != null) { @@ -486,8 +518,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -497,12 +529,12 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } else if (storageLocation != null) { // ://// - //remoteDriverId:// is removed if coming through directStorageIO + // remoteDriverId:// is removed if coming through directStorageIO int index = storageLocation.indexOf(DataAccess.SEPARATOR); - if(index > 0) { + if (index > 0) { storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); } - //THe base store needs the baseStoreIdentifier and not the relative URL + // THe base store needs the baseStoreIdentifier and not the relative URL fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); switch (baseDriverType) { @@ -513,8 +545,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -530,37 +562,41 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); try { - remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); - } catch(MalformedURLException mfue) { + remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); + } catch (MalformedURLException mfue) { logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); } } - //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required. + // Convenience method to assemble the path, starting with the DOI + // authority/identifier/, that is needed to create a base store via + // DataAccess.getDirectStorageIO - the caller has to add the store type specific + // prefix required. 
private String getStoragePath() throws IOException { String fullStoragePath = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStoragePath); int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStoragePath = fullStoragePath + .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } int suffixIndex = fullStoragePath.indexOf("//"); - if(suffixIndex >=0) { - fullStoragePath = fullStoragePath.substring(0, suffixIndex); + if (suffixIndex >= 0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); } if (this.getDvObject() instanceof Dataset) { fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; } else if (this.getDvObject() instanceof DataFile) { fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" - + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; - }else if (dvObject instanceof Dataverse) { + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (dvObject instanceof Dataverse) { throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); } logger.fine("fullStoragePath: " + fullStoragePath); return fullStoragePath; } - + public CloseableHttpClient getSharedHttpClient() { if (httpclient == null) { try { @@ -622,11 +658,11 @@ protected static boolean isValidIdentifier(String driverId, String storageId) { String baseUri = System.getProperty("dataverse.files." 
+ driverId + ".base-uri"); try { URI absoluteURI = new URI(baseUri + "/" + urlPath); - if(!absoluteURI.normalize().toString().startsWith(baseUri)) { + if (!absoluteURI.normalize().toString().startsWith(baseUri)) { logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); return false; } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId); logger.warning(use.getLocalizedMessage()); return false; @@ -642,14 +678,27 @@ public static String getBaseStoreIdFor(String driverId) { public List cleanUp(Predicate filter, boolean dryRun) throws IOException { return baseStore.cleanUp(filter, dryRun); } - + public static void main(String[] args) { System.out.println("Running the main method"); if (args.length > 0) { System.out.printf("List of arguments: {}", Arrays.toString(args)); } - System.setProperty("dataverse.files.globus.base-uri", "12345/top"); + System.setProperty("dataverse.files.globus.base-uri", "2791b83e-b989-47c5-a7fa-ce65fd949522"); System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.setProperty("dataverse.files.globus.globus-token","Mjc5MWI4M2UtYjk4OS00N2M1LWE3ZmEtY2U2NWZkOTQ5NTIyOlprRmxGejNTWDlkTVpUNk92ZmVJaFQyTWY0SDd4cXBoTDNSS29vUmRGVlE9"); + System.setProperty("dataverse.files.globus.base-store","file"); + System.setProperty("dataverse.files.file.type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + System.setProperty("dataverse.files.file.directory", "/tmp/files"); logger.info(JvmSettings.BASE_URI.lookup("globus")); + try { + GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus"); + logger.info("Size is " + gsio.getSizeFromGlobus()); + + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 4fb895f5adc..eac8411c939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { FILES_DIRECTORY(SCOPE_FILES, "directory"), FILES(SCOPE_FILES), BASE_URI(FILES, "base-uri"), + GLOBUS_TOKEN(FILES, "globus-token"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From 3c3378f5a3bf39eff13a582d0dc52a2a5549af8f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 9 May 2023 14:53:25 -0400 Subject: [PATCH 0005/1112] start refactoring Globus bean --- .../dataaccess/GlobusOverlayAccessIO.java | 28 +++++++++----- .../iq/dataverse/globus/AccessToken.java | 2 +- .../dataverse/globus/GlobusServiceBean.java | 37 +++++++++++-------- 3 files changed, 41 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 050b9ddc176..0d7c5458e14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -5,6 +5,8 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.globus.AccessToken; +import 
edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -28,10 +30,8 @@ import java.util.Arrays; import java.util.List; import java.util.function.Predicate; -import java.util.logging.Level; import java.util.logging.Logger; -import org.apache.commons.lang3.NotImplementedException; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; @@ -83,6 +83,8 @@ public class GlobusOverlayAccessIO extends StorageIO { private static boolean trustCerts = false; private int httpConcurrency = 4; + private String globusAccessToken = null; + public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); this.setIsLocalFile(false); @@ -190,18 +192,19 @@ private long getSizeFromGlobus() { URI absoluteURI = null; try { int filenameStart = path.lastIndexOf("/") + 1; - int pathStart = endpointWithBasePath.indexOf("/") + 1; - - String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + int pathStart = endpointWithBasePath.indexOf("/"); +logger.info("endpointWithBasePath: " + endpointWithBasePath); + String directoryPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + path.substring(0, filenameStart); + logger.info("directoryPath: " + directoryPath); String filename = path.substring(filenameStart); String endpoint = pathStart > 0 ? endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath; - absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename); + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + directoryPath + "&filter=name:" + filename); HttpGet get = new HttpGet(absoluteURI); - String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId); - logger.info("Token is " + token); - get.addHeader("Authorization", "Bearer " + token); + + logger.info("Token is " + globusAccessToken); + get.addHeader("Authorization", "Bearer " + globusAccessToken); CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); if (response.getStatusLine().getStatusCode() == 200) { //Get reponse as string @@ -482,6 +485,8 @@ int getUrlExpirationMinutes() { } private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + AccessToken accessToken = GlobusServiceBean.getClientToken(JvmSettings.GLOBUS_TOKEN.lookup(driverId)); + globusAccessToken = accessToken.getOtherTokens().get(0).getAccessToken(); endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId); logger.info("base-uri is " + endpointWithBasePath); if (endpointWithBasePath == null) { @@ -692,8 +697,11 @@ public static void main(String[] args) { DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); System.setProperty("dataverse.files.file.directory", "/tmp/files"); logger.info(JvmSettings.BASE_URI.lookup("globus")); + + + try { - GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus"); + GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234///hdc1/image001.mrc", "globus"); logger.info("Size is " + gsio.getSizeFromGlobus()); } catch (IOException e) { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java index 877fc68e4a1..c93e2c6aa94 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java @@ -46,7 +46,7 @@ String getRefreshToken() { return refreshToken; } - ArrayList getOtherTokens() { + public ArrayList getOtherTokens() { return otherTokens; } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 9d80c5cc280..c2137dd1f47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -167,7 +167,8 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException { if (ruleId.length() > 0) { - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); + globusLogger.info("Start deleting permissions."); String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); @@ -264,15 +265,21 @@ public GlobusTask getTask(AccessToken clientTokenUser, String taskId, Logger glo return task; } - public AccessToken getClientToken() throws MalformedURLException { - String globusBasicToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""); - URL url = new URL( - "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); - - MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null); + public static AccessToken getClientToken(String globusBasicToken) { + URL url; AccessToken clientTokenUser = null; - if (result.status == 200) { - clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + + try { + url = new URL( + "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); + + MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null); + if (result.status == 200) { + clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + } + } catch (MalformedURLException e) { + // On a statically defined URL... 
+ e.printStackTrace(); } return clientTokenUser; } @@ -306,7 +313,7 @@ public AccessToken getAccessToken(HttpServletRequest origRequest, String globusB } - public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, + public static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) { String str = null; HttpURLConnection connection = null; @@ -359,7 +366,7 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode } - private StringBuilder readResultJson(InputStream in) { + private static StringBuilder readResultJson(InputStream in) { StringBuilder sb = null; try { @@ -378,7 +385,7 @@ private StringBuilder readResultJson(InputStream in) { return sb; } - private T parseJson(String sb, Class jsonParserClass, boolean namingPolicy) { + private static T parseJson(String sb, Class jsonParserClass, boolean namingPolicy) { if (sb != null) { Gson gson = null; if (namingPolicy) { @@ -420,7 +427,7 @@ public String getDirectory(String datasetId) { } - class MakeRequestResponse { + static class MakeRequestResponse { public String jsonResponse; public int status; @@ -451,7 +458,7 @@ public boolean giveGlobusPublicPermissions(String datasetId) if (globusEndpoint.equals("") || globusBasicToken.equals("")) { return false; } - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); if (clientTokenUser == null) { logger.severe("Cannot get client token "); return false; @@ -908,7 +915,7 @@ private GlobusTask globusStatusCheck(String taskId, Logger globusLogger) throws try { globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(pollingInterval * 1000); - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); // success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); task = getTask(clientTokenUser, taskId, globusLogger); if (task != null) { From f14b75454a524fd8816d6f5367b0e15fbd0ded92 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 9 May 2023 14:53:56 -0400 Subject: [PATCH 0006/1112] enable globus store main() to run - will revert --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e5b191f0ed7..4926f59f8a0 100644 --- a/pom.xml +++ b/pom.xml @@ -184,7 +184,7 @@ org.glassfish jakarta.json - provided + From 502e660fe342939a617edd6d17a425c83b5a269b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:22:46 -0400 Subject: [PATCH 0007/1112] suppress thumb generation after a failure --- .../edu/harvard/iq/dataverse/DvObject.java | 14 +++++ .../dataaccess/ImageThumbConverter.java | 55 ++++++++++++------- .../dataverse/ingest/IngestServiceBean.java | 4 +- .../V5.13.0.1__9506-track-thumb-failures.sql | 1 + 4 files changed, 54 insertions(+), 20 deletions(-) create mode 100644 src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 854888737ee..6cb3816e3f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -181,7 +181,20 @@ public boolean isPreviewImageAvailable() { public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } 
+ + /** Indicates whether a previous attempt to generate a preview image has failed, regardless of size. + * If so, we won't want to try again every time the preview/thumbnail is requested for a view. + */ + private boolean previewsHaveFailed; + + public boolean isPreviewsHaveFailed() { + return previewsHaveFailed; + } + public void setPreviewsHaveFailed(boolean previewsHaveFailed) { + this.previewsHaveFailed = previewsHaveFailed; + } + public Timestamp getModificationTime() { return modificationTime; } @@ -462,6 +475,7 @@ public void setStorageIdentifier(String storageIdentifier) { */ public abstract boolean isAncestorOf( DvObject other ); + @OneToMany(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true) List roleAssignments; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 2b4aed3a9a5..eb08646454d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -48,6 +48,7 @@ import java.nio.channels.WritableByteChannel; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; //import org.primefaces.util.Base64; @@ -110,15 +111,24 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (isThumbnailCached(storageIO, size)) { + logger.fine("Found cached thumbnail for " + file.getId()); return true; } - logger.fine("Checking for thumbnail, file type: " + file.getContentType()); - - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - return generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - return generatePDFThumbnail(storageIO, size); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + // Don't try to generate if there have been failures: + if (!file.isPreviewsHaveFailed()) { + boolean thumbnailGenerated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + thumbnailGenerated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + thumbnailGenerated = generatePDFThumbnail(storageIO, size); + } + if (!thumbnailGenerated) { + logger.fine("No thumbnail generated for " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } + return thumbnailGenerated; } return false; @@ -436,20 +446,27 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (cachedThumbnailChannel == null) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); - // try to generate, if not available: - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } + // try to generate, if not available and hasn't failed before + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); + if (!file.isPreviewsHaveFailed()) { + boolean generated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + generated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + generated = generatePDFThumbnail(storageIO, size); + } - if (generated) { - // try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; + if (!generated) { + // Record failure + logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } else { + // Success - try to open again: + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 5a353453fe8..fbe2d7b38ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -292,7 +292,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException ioex) { logger.warning("Failed to save generated file " + generated.toString()); - } + //Shouldn't mark this file as having a preview after this. + dataFile.setPreviewImageAvailable(false); + } } // ... but we definitely want to delete it: diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql new file mode 100644 index 00000000000..9b12d27db91 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql @@ -0,0 +1 @@ +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewshavefailed BOOLEAN DEFAULT FALSE; \ No newline at end of file From 0fea5ccca11b2348429ddfee75e4bafc709c7473 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:25:38 -0400 Subject: [PATCH 0008/1112] refactor error --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index eb08646454d..254c334d655 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +459,7 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 8f5350ae0df4df60c55ff770259531935cb6ac9b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 10:32:21 -0400 Subject: [PATCH 
0009/1112] cache isThumb available --- .../iq/dataverse/ThumbnailServiceWrapper.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 6c8db8c124b..e2bb21c8a4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -49,6 +49,7 @@ public class ThumbnailServiceWrapper implements java.io.Serializable { private Map dvobjectThumbnailsMap = new HashMap<>(); private Map dvobjectViewMap = new HashMap<>(); + private Map hasThumbMap = new HashMap<>(); private String getAssignedDatasetImage(Dataset dataset, int size) { if (dataset == null) { @@ -133,7 +134,7 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { if ((!((DataFile)result.getEntity()).isRestricted() || permissionsWrapper.hasDownloadFilePermission(result.getEntity())) - && dataFileService.isThumbnailAvailable((DataFile) result.getEntity())) { + && isThumbnailAvailable((DataFile) result.getEntity())) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( (DataFile) result.getEntity(), @@ -159,6 +160,13 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { return null; } + public boolean isThumbnailAvailable(DataFile entity) { + if(!hasThumbMap.containsKey(entity.getId())) { + hasThumbMap.put(entity.getId(), dataFileService.isThumbnailAvailable(entity)); + } + return hasThumbMap.get(entity.getId()); + } + // it's the responsibility of the user - to make sure the search result // passed to this method is of the Dataset type! public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { @@ -295,7 +303,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo } } - if (dataFileService.isThumbnailAvailable(thumbnailImageFile)) { + if (isThumbnailAvailable(thumbnailImageFile)) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( thumbnailImageFile, size); @@ -323,6 +331,7 @@ public String getDataverseCardImageAsBase64Url(SolrSearchResult result) { public void resetObjectMaps() { dvobjectThumbnailsMap = new HashMap<>(); dvobjectViewMap = new HashMap<>(); + hasThumbMap = new HashMap<>(); } From 8604eef7f470eade8dbf885ed42bc47407db74ff Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:22:18 -0400 Subject: [PATCH 0010/1112] set thumb fail column --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 5 ++++- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 196f84b6877..a5822828682 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1127,7 +1127,7 @@ public boolean isThumbnailAvailable (DataFile file) { } // If thumbnails are not even supported for this class of files, - // there's notthing to talk about: + // there's nothing to talk about: if (!FileUtil.isThumbnailSupported(file)) { return false; } @@ -1149,6 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); } return false; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 254c334d655..ab9294eea72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -115,7 +115,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s return true; } - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { boolean thumbnailGenerated = false; From aeae8f4ddbb05794c177e9b1d33725e1ed7d7e2f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:50:49 -0400 Subject: [PATCH 0011/1112] use thumb wrapper in edit and view files --- src/main/webapp/editFilesFragment.xhtml | 4 ++-- src/main/webapp/file-info-fragment.xhtml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..af06b44e3bc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -360,13 +360,13 @@
- - + #{fileMetadata.label} diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml index 33a8d2c3ca5..3e8e80d51e7 100644 --- a/src/main/webapp/file-info-fragment.xhtml +++ b/src/main/webapp/file-info-fragment.xhtml @@ -28,8 +28,8 @@
- - + From c4ad20bc4b67b93908e60b76a251240f4a6e2540 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 13:49:35 -0400 Subject: [PATCH 0012/1112] add api --- .../edu/harvard/iq/dataverse/api/Admin.java | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index d219339add9..14c556e9caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2321,4 +2321,26 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); } + @DELETE + @Path("/clearThumbnailFailureFlag") + public Response clearThumbnailFailureFlag() { + em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE").executeUpdate(); + return ok("Thumnail Failure Flags cleared."); + } + + @DELETE + @Path("/clearThumbnailFailureFlag/{id}") + public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String fileId) { + try { + DataFile df = findDataFileOrDie(fileId); + Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE where id = ?"); + deleteQuery.setParameter(1, df.getId()); + deleteQuery.executeUpdate(); + return ok("Thumnail Failure Flag cleared for file id=: " + df.getId() + "."); + } catch (WrappedResponse r) { + logger.info("Could not find file with the id: " + fileId); + return error(Status.BAD_REQUEST, "Could not find file with the id: " + fileId); + } + } + } From 63e98b3b60a4baae98f1f88a282b97694929c443 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:16:47 -0400 Subject: [PATCH 0013/1112] make clearer --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index a5822828682..f41565c9449 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1149,11 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; - } else { - file.setPreviewsHaveFailed(true); - this.save(file); } - + file.setPreviewsHaveFailed(true); + this.save(file); return false; } From 2671cb75effb5425d02b3e874c7525b7833dc533 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:25:58 -0400 Subject: [PATCH 0014/1112] update comment --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 6cb3816e3f1..87619450133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -182,8 +182,11 @@ public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } - /** Indicates whether a previous attempt to generate a preview image has failed, regardless of size. - * If so, we won't want to try again every time the preview/thumbnail is requested for a view. + /** + * Indicates whether a previous attempt to generate a preview image has failed, + * regardless of size. 
This could be due to the file not being accessible, or a + * real failure in generating the thumbnail. In both cases, we won't want to try + * again every time the preview/thumbnail is requested for a view. */ private boolean previewsHaveFailed; From 19db99b1427700c9cc4ad462c0edd017e6dd5799 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:26:28 -0400 Subject: [PATCH 0015/1112] remove setting flag where datafile is not clearly being saved to db --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index ab9294eea72..921faba7989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +458,6 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 156d025970eeb5223b6fd8343db09cafee057fed Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 1 Jun 2023 15:09:25 -0400 Subject: [PATCH 0016/1112] fix non-merge-able error when recording thumb fail --- .../iq/dataverse/DataFileServiceBean.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index f41565c9449..880b2ea7dc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1142,17 +1142,17 @@ public boolean isThumbnailAvailable (DataFile file) { is more important... 
*/ - - if (ImageThumbConverter.isThumbnailAvailable(file)) { - file = this.find(file.getId()); - file.setPreviewImageAvailable(true); - this.save(file); - return true; - } - file.setPreviewsHaveFailed(true); - this.save(file); - return false; + file = this.find(file.getId()); + if (ImageThumbConverter.isThumbnailAvailable(file)) { + file.setPreviewImageAvailable(true); + this.save(file); + return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); + return false; + } } From 97aa46cb3e9bd2d424961e68e9d024216740c57f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 16:50:38 -0400 Subject: [PATCH 0017/1112] rename script --- ...humb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.1__9506-track-thumb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql rename to src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql From dbc36c9d938571a5b61156611c445d266fbafe76 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 17:06:19 -0400 Subject: [PATCH 0018/1112] refactor - remove duplicate code --- .../dataaccess/ImageThumbConverter.java | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 921faba7989..fb0785ffd7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -114,7 +114,11 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s logger.fine("Found cached thumbnail for " + file.getId()); return true; } + return generateThumbnail(storageIO, size); + } + + private static boolean generateThumbnail(StorageIO storageIO, int size) { logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { @@ -131,7 +135,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } return false; - } // Note that this method works on ALL file types for which thumbnail @@ -446,25 +449,11 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); // try to generate, if not available and hasn't failed before - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); - if (!file.isPreviewsHaveFailed()) { - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } - - if (!generated) { - // Record failure - logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - } else { - // Success - try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; - } + if(generateThumbnail(storageIO, size)) { + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; } } From 9c809c400d7e71a5cd682a892a20aa0dfa21c8ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Haarla=CC=88nder?= Date: Wed, 14 Jun 2023 10:03:23 +0200 Subject: [PATCH 0019/1112] #IQSS/3818 Delete temp thumbnail files --- .../dataverse/dataaccess/ImageThumbConverter.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 2b4aed3a9a5..16003f6f32b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -195,6 +195,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s // will run the ImageMagick on it, and will save its output in another temp // file, and will save it as an "auxiliary" file via the driver. 
boolean tempFilesRequired = false; + File tempFile = null; try { Path pdfFilePath = storageIO.getFileSystemPath(); @@ -222,7 +223,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s return false; } - File tempFile; + FileChannel tempFileChannel = null; try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); @@ -254,10 +255,14 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s try { logger.fine("attempting to save generated pdf thumbnail, as AUX file " + THUMBNAIL_SUFFIX + size); storageIO.savePathAsAux(Paths.get(imageThumbFileName), THUMBNAIL_SUFFIX + size); + } catch (IOException ioex) { logger.warning("failed to save generated pdf thumbnail, as AUX file " + THUMBNAIL_SUFFIX + size + "!"); return false; } + finally { + tempFile.delete(); + } } return true; @@ -353,12 +358,18 @@ private static boolean generateImageThumbnailFromInputStream(StorageIO if (tempFileRequired) { storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), THUMBNAIL_SUFFIX + size); + } } catch (Exception ioex) { logger.warning("Failed to rescale and/or save the image: " + ioex.getMessage()); return false; } + finally { + if(tempFileRequired) { + tempFile.delete(); + } + } return true; From ffad284142f64885a3ab748878a86e555a77d300 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Haarla=CC=88nder?= Date: Wed, 14 Jun 2023 10:50:34 +0200 Subject: [PATCH 0020/1112] #IQSS/3818 Delete temp thumbnail files --- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 16003f6f32b..134ae20de87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -261,7 +261,10 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s return false; } finally { - tempFile.delete(); + try { + tempFile.delete(); + } + catch (Exception e) {} } } @@ -367,7 +370,10 @@ private static boolean generateImageThumbnailFromInputStream(StorageIO } finally { if(tempFileRequired) { - tempFile.delete(); + try { + tempFile.delete(); + } + catch (Exception e) {} } } From 0c8972304a43c25ed1de1c5cc6cc1c09ef419948 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 10:30:05 -0400 Subject: [PATCH 0021/1112] try ds logos as url requests --- .../iq/dataverse/ThumbnailServiceWrapper.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index e2bb21c8a4c..66f79472178 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -12,7 +13,8 @@ import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.FileUtil; -import java.io.File; +import edu.harvard.iq.dataverse.util.SystemConfig; + import 
java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -21,6 +23,8 @@ import java.util.Base64; import java.util.HashMap; import java.util.Map; +import java.util.logging.Logger; + import javax.ejb.EJB; import javax.enterprise.context.RequestScoped; import javax.faces.view.ViewScoped; @@ -36,6 +40,9 @@ @RequestScoped @Named public class ThumbnailServiceWrapper implements java.io.Serializable { + + private static final Logger logger = Logger.getLogger(ThumbnailServiceWrapper.class.getCanonicalName()); + @Inject PermissionsWrapper permissionsWrapper; @EJB @@ -214,7 +221,13 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo this.dvobjectThumbnailsMap.put(datasetId, ""); return null; } + + String url = SystemConfig.getDataverseSiteUrlStatic() + "/datasets/" + dataset.getId() + "/logo"; + logger.fine("getDatasetCardImageAsBase64Url: " + url); + this.dvobjectThumbnailsMap.put(datasetId,url); + return url; +/* String cardImageUrl = null; StorageIO dataAccess = null; @@ -320,6 +333,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo //logger.info("dataset id " + result.getEntityId() + ", returning " + cardImageUrl); return cardImageUrl; + */ } // it's the responsibility of the user - to make sure the search result From dc4b6ae5201af228b1b484c6dd430713f8728ccc Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 17:19:41 -0400 Subject: [PATCH 0022/1112] set the datasetid for search cards --- .../java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 66f79472178..4c3778527d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -194,6 +194,7 @@ public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { return null; } Dataset dataset = (Dataset)result.getEntity(); + dataset.setId(result.getEntityId()); Long versionId = result.getDatasetVersionId(); From 546cfdf2048158320e76a9345e9ebc3caf7ca6c2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 1 Jun 2023 15:09:25 -0400 Subject: [PATCH 0023/1112] fix non-merge-able error when recording thumb fail --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 880b2ea7dc4..ec12480d28d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1148,11 +1148,10 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; - } else { - file.setPreviewsHaveFailed(true); - this.save(file); - return false; } + file.setPreviewsHaveFailed(true); + this.save(file); + return false; } From d3a48dffdfaa56bba065b3c36a2b6469e4227c33 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 17:44:02 -0400 Subject: [PATCH 0024/1112] typo --- .../java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java 
b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 4c3778527d7..8dda91fd6a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -223,7 +223,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo return null; } - String url = SystemConfig.getDataverseSiteUrlStatic() + "/datasets/" + dataset.getId() + "/logo"; + String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; From f505428f12a5ead774642837bdb871deda34ee27 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 13:13:01 -0400 Subject: [PATCH 0025/1112] only send url if thumb should exist --- .../iq/dataverse/ThumbnailServiceWrapper.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 8dda91fd6a3..19c53ffa77e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; @@ -222,6 +223,20 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo this.dvobjectThumbnailsMap.put(datasetId, ""); return null; } + DataFile thumbnailFile = dataset.getThumbnailFile(); + + if (thumbnailFile == null) { + thumbnailFile = DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null); + if (thumbnailFile == null) { + logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo available that could be selected automatically."); + return null; + } + } + if (thumbnailFile.isRestricted()) { + logger.fine("Dataset (id :" + dataset.getId() + ") has a logo the user selected but the file must have later been restricted. 
Returning null."); + return null; + } + String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); From 2d177a60fe67df26bafad35cf237e048a21545ee Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 15:08:15 -0400 Subject: [PATCH 0026/1112] use inputStream.transferTo --- .../dataaccess/ImageThumbConverter.java | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index fb0785ffd7b..bd87c5541a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -223,30 +223,32 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s } if (tempFilesRequired) { - ReadableByteChannel pdfFileChannel; - + //ReadableByteChannel pdfFileChannel; + InputStream inputStream = null; try { storageIO.open(); - //inputStream = storageIO.getInputStream(); - pdfFileChannel = storageIO.getReadChannel(); + inputStream = storageIO.getInputStream(); + //pdfFileChannel = storageIO.getReadChannel(); } catch (Exception ioex) { logger.warning("caught Exception trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier()); return false; } File tempFile; - FileChannel tempFileChannel = null; + OutputStream outputStream = null; + //FileChannel tempFileChannel = null; try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); - tempFileChannel = new FileOutputStream(tempFile).getChannel(); + outputStream = new FileOutputStream(tempFile); + inputStream.transferTo(outputStream); - tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); + //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file."); return false; } finally { - IOUtils.closeQuietly(tempFileChannel); - IOUtils.closeQuietly(pdfFileChannel); + IOUtils.closeQuietly(inputStream); + IOUtils.closeQuietly(outputStream); } sourcePdfFile = tempFile; } From 6540b5da0966addffa3a0a6a9d7e67735f89e237 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 15:42:29 -0400 Subject: [PATCH 0027/1112] add debug --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index bd87c5541a5..4a2b8ea0e6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -240,7 +240,8 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new FileOutputStream(tempFile); - inputStream.transferTo(outputStream); + long sz = inputStream.transferTo(outputStream); + logger.info(" wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { @@ -763,7 +764,7 @@ public static String generatePDFThumbnailFromFile(String fileLocation, int size) try { fileSize = new 
File(fileLocation).length(); } catch (Exception ex) { - // + logger.warning("Can't open file: " + fileLocation); } if (fileSize == 0 || fileSize > sizeLimit) { From e202d0abc7395fe85218745510b32ade9b6ca770 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 16:15:58 -0400 Subject: [PATCH 0028/1112] more debug --- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 4a2b8ea0e6d..3033269f3bc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -196,6 +196,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s // We rely on ImageMagick to convert PDFs; so if it's not installed, // better give up right away: if (!isImageMagickInstalled()) { + logger.info("Couldn't find IM"); return false; } @@ -218,12 +219,15 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s tempFilesRequired = true; } catch (IOException ioex) { + logger.warning(ioex.getMessage()); + ioex.printStackTrace(); // this on the other hand is likely a fatal condition :( return false; } if (tempFilesRequired) { //ReadableByteChannel pdfFileChannel; + logger.info("Creating temp file"); InputStream inputStream = null; try { storageIO.open(); @@ -241,7 +245,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new FileOutputStream(tempFile); long sz = inputStream.transferTo(outputStream); - logger.info(" wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); + logger.info("Wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { From b9cd2bbf0c42fb4e7aada29d7cea817c195ca75d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:22:05 -0400 Subject: [PATCH 0029/1112] include failed preview flag in queries --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 439e4b17ed4..0bd0a01aef1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -762,7 +762,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - // + "AND o.previewImageAvailable = false " + + "AND o.previewshavefailed = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype LIKE 'image/%' " @@ -796,7 +796,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - // + "AND o.previewImageAvailable = false " + + "AND o.previewshavefailed = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype = 'application/pdf' " From ac5a9564848ba241a993e8e9252641820e9041b4 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:22:59 -0400 Subject: [PATCH 0030/1112] use 
getThumbnailByVersionId --- .../iq/dataverse/ThumbnailServiceWrapper.java | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 19c53ffa77e..ff5e510e82c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -226,23 +226,20 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo DataFile thumbnailFile = dataset.getThumbnailFile(); if (thumbnailFile == null) { - thumbnailFile = DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null); - if (thumbnailFile == null) { - logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo available that could be selected automatically."); - return null; - } - } - if (thumbnailFile.isRestricted()) { - logger.fine("Dataset (id :" + dataset.getId() + ") has a logo the user selected but the file must have later been restricted. Returning null."); - return null; + + // We attempt to auto-select via the optimized, native query-based method + // from the DatasetVersionService: + if (datasetVersionService.getThumbnailByVersionId(versionId) == null) { + return null; + } } - String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; + /* String cardImageUrl = null; StorageIO dataAccess = null; From 98acd6b50af770779329de1201663d8599edf16a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:49:24 -0400 Subject: [PATCH 0031/1112] cleanup --- .../dataverse/dataaccess/ImageThumbConverter.java | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 3033269f3bc..458b8da227b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -196,7 +196,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s // We rely on ImageMagick to convert PDFs; so if it's not installed, // better give up right away: if (!isImageMagickInstalled()) { - logger.info("Couldn't find IM"); + logger.fine("Couldn't find ImageMagick"); return false; } @@ -220,19 +220,15 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s } catch (IOException ioex) { logger.warning(ioex.getMessage()); - ioex.printStackTrace(); // this on the other hand is likely a fatal condition :( return false; } if (tempFilesRequired) { - //ReadableByteChannel pdfFileChannel; - logger.info("Creating temp file"); InputStream inputStream = null; try { storageIO.open(); inputStream = storageIO.getInputStream(); - //pdfFileChannel = storageIO.getReadChannel(); } catch (Exception ioex) { logger.warning("caught Exception trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier()); return false; @@ -240,14 +236,11 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s File tempFile; OutputStream outputStream = null; - //FileChannel tempFileChannel = null; try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new 
FileOutputStream(tempFile); - long sz = inputStream.transferTo(outputStream); - logger.info("Wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); - - //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); + //Reads/transfers all bytes from the input stream to the output stream. + inputStream.transferTo(outputStream); } catch (IOException ioex) { logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file."); return false; From 610c65dc9ddd403041ee95475810db2977e57623 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 12:56:13 -0400 Subject: [PATCH 0032/1112] rename and cleanup --- .../edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../iq/dataverse/DataverseServiceBean.java | 45 ------- .../iq/dataverse/ThumbnailServiceWrapper.java | 117 +----------------- .../search/SearchIncludeFragment.java | 2 +- 4 files changed, 6 insertions(+), 160 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 3d608153ba3..2ca1fb825f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -483,7 +483,7 @@ public String getThumbnailString() { thumbnailString = datasetThumbnail.getBase64image(); } else { - thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(dataset, + thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsUrl(dataset, workingVersion.getId(), !workingVersion.isDraft(), ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index e092f209acd..e99458fbc9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -346,51 +346,6 @@ public String getDataverseLogoThumbnailAsBase64ById(Long dvId) { } return null; } - - /* - public boolean isDataverseLogoThumbnailAvailable(Dataverse dataverse, User user) { - if (dataverse == null) { - return false; - } - - // First, check if the dataverse has a defined logo: - - //if (dataverse.getDataverseTheme() != null && dataverse.getDataverseTheme().getLogo() != null && !dataverse.getDataverseTheme().getLogo().equals("")) { - File dataverseLogoFile = getLogo(dataverse); - if (dataverseLogoFile != null) { - String logoThumbNailPath = null; - - if (dataverseLogoFile.exists()) { - logoThumbNailPath = ImageThumbConverter.generateImageThumbnailFromFile(dataverseLogoFile.getAbsolutePath(), 48); - if (logoThumbNailPath != null) { - return true; - } - } - } - //} - */ - // If there's no uploaded logo for this dataverse, go through its - // [released] datasets and see if any of them have card images: - // - // TODO: - // Discuss/Decide if we really want to do this - i.e., go through every - // file in every dataset below... - // -- L.A. 
4.0 beta14 - /* - for (Dataset dataset : datasetService.findPublishedByOwnerId(dataverse.getId())) { - if (dataset != null) { - DatasetVersion releasedVersion = dataset.getReleasedVersion(); - - if (releasedVersion != null) { - if (datasetService.isDatasetCardImageAvailable(releasedVersion, user)) { - return true; - } - } - } - } */ - /* - return false; - } */ private File getLogo(Dataverse dataverse) { if (dataverse.getId() == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index ff5e510e82c..c75c29ea094 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -177,7 +177,7 @@ public boolean isThumbnailAvailable(DataFile entity) { // it's the responsibility of the user - to make sure the search result // passed to this method is of the Dataset type! - public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { + public String getDatasetCardImageAsUrl(SolrSearchResult result) { // Before we do anything else, check if it's a harvested dataset; // no need to check anything else if so (harvested datasets never have // thumbnails) @@ -199,10 +199,10 @@ public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { Long versionId = result.getDatasetVersionId(); - return getDatasetCardImageAsBase64Url(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + return getDatasetCardImageAsUrl(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); } - public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, boolean autoselect, int size) { + public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean autoselect, int size) { Long datasetId = dataset.getId(); if (datasetId != null) { if (this.dvobjectThumbnailsMap.containsKey(datasetId)) { @@ -235,118 +235,9 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo } String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; - logger.fine("getDatasetCardImageAsBase64Url: " + url); + logger.fine("getDatasetCardImageAsUrl: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; - - -/* - String cardImageUrl = null; - StorageIO dataAccess = null; - - try{ - dataAccess = DataAccess.getStorageIO(dataset); - } - catch(IOException ioex){ - // ignore - } - - InputStream in = null; - // See if the dataset already has a dedicated thumbnail ("logo") saved as - // an auxilary file on the dataset level: - // (don't bother checking if it exists; just try to open the input stream) - try { - in = dataAccess.getAuxFileAsInputStream(datasetLogoThumbnail + ".thumb" + size); - //thumb48addedByImageThumbConverter); - } catch (Exception ioex) { - //ignore - } - - if (in != null) { - try { - byte[] bytes = IOUtils.toByteArray(in); - String base64image = Base64.getEncoder().encodeToString(bytes); - cardImageUrl = FileUtil.DATA_URI_SCHEME + base64image; - this.dvobjectThumbnailsMap.put(datasetId, cardImageUrl); - return cardImageUrl; - } catch (IOException ex) { - this.dvobjectThumbnailsMap.put(datasetId, ""); - return null; - // (alternatively, we could ignore the exception, and proceed with the - // regular process of selecting the thumbnail from the available - // image files - ?) 
- } finally - { - IOUtils.closeQuietly(in); - } - } - - // If not, see if the dataset has one of its image files already assigned - // to be the designated thumbnail: - cardImageUrl = this.getAssignedDatasetImage(dataset, size); - - if (cardImageUrl != null) { - //logger.info("dataset id " + result.getEntity().getId() + " has a dedicated image assigned; returning " + cardImageUrl); - return cardImageUrl; - } - - // And finally, try to auto-select the thumbnail (unless instructed not to): - - if (!autoselect) { - return null; - } - - // We attempt to auto-select via the optimized, native query-based method - // from the DatasetVersionService: - Long thumbnailImageFileId = datasetVersionService.getThumbnailByVersionId(versionId); - - if (thumbnailImageFileId != null) { - //cardImageUrl = FILE_CARD_IMAGE_URL + thumbnailImageFileId; - if (this.dvobjectThumbnailsMap.containsKey(thumbnailImageFileId)) { - // Yes, return previous answer - //logger.info("using cached result for ... "+datasetId); - if (!"".equals(this.dvobjectThumbnailsMap.get(thumbnailImageFileId))) { - return this.dvobjectThumbnailsMap.get(thumbnailImageFileId); - } - return null; - } - - DataFile thumbnailImageFile = null; - - if (dvobjectViewMap.containsKey(thumbnailImageFileId) - && dvobjectViewMap.get(thumbnailImageFileId).isInstanceofDataFile()) { - thumbnailImageFile = (DataFile) dvobjectViewMap.get(thumbnailImageFileId); - } else { - thumbnailImageFile = dataFileService.findCheapAndEasy(thumbnailImageFileId); - if (thumbnailImageFile != null) { - // TODO: - // do we need this file on the map? - it may not even produce - // a thumbnail! - dvobjectViewMap.put(thumbnailImageFileId, thumbnailImageFile); - } else { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, ""); - return null; - } - } - - if (isThumbnailAvailable(thumbnailImageFile)) { - cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( - thumbnailImageFile, - size); - //ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - } - - if (cardImageUrl != null) { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, cardImageUrl); - } else { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, ""); - } - } - - //logger.info("dataset id " + result.getEntityId() + ", returning " + cardImageUrl); - - return cardImageUrl; - */ } // it's the responsibility of the user - to make sure the search result diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index bfe397cf48c..99fe4cd979b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -1302,7 +1302,7 @@ public void setDisplayCardValues() { result.setImageUrl(thumbnailServiceWrapper.getDataverseCardImageAsBase64Url(result)); } else if (result.getType().equals("datasets")) { if (result.getEntity() != null) { - result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(result)); + result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsUrl(result)); } if (result.isHarvested()) { From 391504de43d8992e4b97d506fdfc763e512a8fc4 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 13:46:35 -0400 Subject: [PATCH 0033/1112] api docs --- doc/sphinx-guides/source/api/native-api.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b39cf91337a..24f6c0d4ced 100644 
--- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4649,3 +4649,23 @@ A curl example using an ``ID`` curl -X POST -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/feedback Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. + +.. _thumbnail_reset: + +Reset Thumbnail Failure Flags +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If Dataverse attempts to create a thumbnail image for an image or pdf file and the attempt fails, Dataverse will set a flag for the file to avoid repeated attempts to generate the thumbnail. +For cases where the problem may have been temporary (or fixed in a later Dataverse release), two API calls exist to reset this flag for all files or for a given file. + +Curl examples + +.. code-block:: bash + + export SERVER_URL=http://localhost + export fileID=1234 + + curl -X DELETE $SERVER_URL/api/admin/clearThumbnailFailureFlag + + curl -X DELETE $SERVER_URL/api/admin/clearThumbnailFailureFlag/$fileID + From de7963a0635646f6c00e1362fc87152029394839 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 13:53:30 -0400 Subject: [PATCH 0034/1112] refactor typo --- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 458b8da227b..febf659b71a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -114,11 +114,11 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s logger.fine("Found cached thumbnail for " + file.getId()); return true; } - return generateThumbnail(storageIO, size); + return generateThumbnail(file, storageIO, size); } - private static boolean generateThumbnail(StorageIO storageIO, int size) { + private static boolean generateThumbnail(DataFile file, StorageIO storageIO, int size) { logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { @@ -449,7 +449,7 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); // try to generate, if not available and hasn't failed before - if(generateThumbnail(storageIO, size)) { + if(generateThumbnail(file, storageIO, size)) { try { cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); } catch (Exception ioEx) { From d2d7f4df4ef0770d2948a8027cf91c16fda1b1e8 Mon Sep 17 00:00:00 2001 From: okaradeniz Date: Fri, 14 Jul 2023 12:07:52 +0200 Subject: [PATCH 0035/1112] cite selected dataset version in citation downloads --- .../iq/dataverse/FileDownloadServiceBean.java | 74 +++++++++++++++---- src/main/webapp/dataset-citation.xhtml | 6 +- src/main/webapp/file.xhtml | 6 +- 3 files changed, 64 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..ff904c41cb8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -349,6 +349,16 @@ public void downloadDatasetCitationXML(Dataset dataset) { downloadCitationXML(null, dataset, false); } + public void downloadDatasetCitationXML(DatasetVersion version) { + // DatasetVersion-level citation: + DataCitation citation=null; + citation = new DataCitation(version); + + String fileNameString; + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".xml"; + downloadXML(citation, fileNameString); + } + public void downloadDatafileCitationXML(FileMetadata fileMetadata) { downloadCitationXML(fileMetadata, null, false); } @@ -364,9 +374,6 @@ public void downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, bool } else { citation= new DataCitation(fileMetadata, direct); } - FacesContext ctx = FacesContext.getCurrentInstance(); - HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); - response.setContentType("text/xml"); String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -375,14 +382,21 @@ public void downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, bool // Datafile-level citation: fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.ENDNOTE); } + downloadXML(citation, fileNameString); + } + + public void downloadXML(DataCitation citation, String fileNameString) { + FacesContext ctx = FacesContext.getCurrentInstance(); + HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); + response.setContentType("text/xml"); response.setHeader("Content-Disposition", fileNameString); + try { ServletOutputStream out = response.getOutputStream(); citation.writeAsEndNoteCitation(out); out.flush(); ctx.responseComplete(); } catch (IOException e) { - } } @@ -392,6 +406,16 @@ public void downloadDatasetCitationRIS(Dataset dataset) { } + public void downloadDatasetCitationRIS(DatasetVersion version) { + // DatasetVersion-level citation: + DataCitation citation=null; + citation = new DataCitation(version); + + String fileNameString; + 
fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".ris"; + downloadRIS(citation, fileNameString); + } + public void downloadDatafileCitationRIS(FileMetadata fileMetadata) { downloadCitationRIS(fileMetadata, null, false); } @@ -408,10 +432,6 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool citation= new DataCitation(fileMetadata, direct); } - FacesContext ctx = FacesContext.getCurrentInstance(); - HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); - response.setContentType("application/download"); - String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -420,6 +440,14 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool // Datafile-level citation: fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.RIS); } + downloadRIS(citation, fileNameString); + } + + public void downloadRIS(DataCitation citation, String fileNameString) { + //SEK 12/3/2018 changing this to open the json in a new tab. + FacesContext ctx = FacesContext.getCurrentInstance(); + HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); + response.setContentType("application/download"); response.setHeader("Content-Disposition", fileNameString); try { @@ -431,7 +459,7 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool } } - + private String getFileNameFromPid(GlobalId id) { return id.asString(); } @@ -442,6 +470,16 @@ public void downloadDatasetCitationBibtex(Dataset dataset) { } + public void downloadDatasetCitationBibtex(DatasetVersion version) { + // DatasetVersion-level citation: + DataCitation citation=null; + citation = new DataCitation(version); + + String fileNameString; + fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".bib"; + downloadBibtex(citation, fileNameString); + } + public void downloadDatafileCitationBibtex(FileMetadata fileMetadata) { downloadCitationBibtex(fileMetadata, null, false); } @@ -457,13 +495,7 @@ public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, b } else { citation= new DataCitation(fileMetadata, direct); } - //SEK 12/3/2018 changing this to open the json in a new tab. - FacesContext ctx = FacesContext.getCurrentInstance(); - HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); - - //Fix for 6029 FireFox was failing to parse it when content type was set to json - response.setContentType("text/plain"); - + String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -472,6 +504,16 @@ public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, b // Datafile-level citation: fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.BIBTEX); } + downloadBibtex(citation, fileNameString); + } + + public void downloadBibtex(DataCitation citation, String fileNameString) { + //SEK 12/3/2018 changing this to open the json in a new tab. 
+ FacesContext ctx = FacesContext.getCurrentInstance(); + HttpServletResponse response = (HttpServletResponse) ctx.getExternalContext().getResponse(); + + //Fix for 6029 FireFox was failing to parse it when content type was set to json + response.setContentType("text/plain"); response.setHeader("Content-Disposition", fileNameString); try { diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml index 9baced25be0..4162bfd92e4 100644 --- a/src/main/webapp/dataset-citation.xhtml +++ b/src/main/webapp/dataset-citation.xhtml @@ -33,17 +33,17 @@
    [The dataset-citation.xhtml and file.xhtml hunks were lost when the markup was stripped during extraction; per the commit message, they point the citation download links at the new DatasetVersion-based methods.]
#{msg.rendered()} From 00a17071c358b7ebee09e77130cb7319c665dfb5 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:36 -0500 Subject: [PATCH 0212/1112] Revert "allow longer custom questions" This reverts commit ba4d178f5c541ec88ea0879ec5c715bda529f2c9. --- src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index d880da5b4a8..2cb6f27c3e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import jakarta.validation.constraints.NotBlank; +import org.hibernate.validator.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false, columnDefinition = "TEXT") + @Column( nullable = false ) private String questionString; private boolean required; From d3fbee58262ac439a0b10f4ca7e1494dea4a6c5d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:43 -0500 Subject: [PATCH 0213/1112] Revert "add return null if commandexception" This reverts commit aa7eceeb762eca045127cf91acb35d6c62b00d79. --- src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index 8b09291d052..9fb584a9133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -320,7 +320,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - return null; + //logger.severe(ex.getMessage()); } editMode = null; String msg = (create)? BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); From 4b347c7ec13591ba38ffa55fbde394cce2b8bcfe Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 10 Nov 2023 17:47:17 -0500 Subject: [PATCH 0214/1112] doc update --- .../source/developers/big-data-support.rst | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 1917967b3f3..d38f7f27a68 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -149,20 +149,30 @@ Globus File Transfer Note: Globus file transfer is still experimental but feedback is welcome! See :ref:`support`. -Users can transfer files via `Globus `_ into and out of datasets when their Dataverse installation is configured to use a Globus accessible S3 store and a community-developed `dataverse-globus `_ "transfer" app has been properly installed and configured. 
+Users can transfer files via `Globus `_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) +and a community-developed `dataverse-globus `_ app has been properly installed and configured. Due to differences in the access control models of a Dataverse installation and Globus, enabling the Globus capability on a store will disable the ability to restrict and embargo files in that store. -As Globus aficionados know, Globus endpoints can be in a variety of places, from data centers to personal computers. This means that from within the Dataverse software, a Globus transfer can feel like an upload or a download (with Globus Personal Connect running on your laptop, for example) or it can feel like a true transfer from one server to another (from a cluster in a data center into a Dataverse dataset or vice versa). +Globus endpoints can be in a variety of places, from data centers to personal computers. +This means that from within the Dataverse software, a Globus transfer can feel like an upload or a download (with Globus Personal Connect running on your laptop, for example) or it can feel like a true transfer from one server to another (from a cluster in a data center into a Dataverse dataset or vice versa). -Globus transfer uses a very efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files: +Globus transfer uses an efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files: * robust file transfer capable of restarting after network or endpoint failures * third-party transfer, which enables a user accessing a Dataverse installation in their desktop browser to initiate transfer of their files from a remote endpoint (i.e. on a local high-performance computing cluster), directly to an S3 store managed by the Dataverse installation -Globus transfer requires use of the Globus S3 connector which requires a paid Globus subscription at the host institution. Users will need a Globus account which could be obtained via their institution or directly from Globus (at no cost). +Dataverse supports three options for using Globus, two involving transfer to Dataverse-managed endpoints and one allowing Dataverse to reference files on remote endpoints. +Dataverse-managed endpoints must be Globus 'guest collections' hosted on either a file-system-based endpoint or an S3-based endpoint (the latter requires use of the Globus +S3 connector which requires a paid Globus subscription at the host institution). In either case, Dataverse is configured with the Globus credentials of a user account that can manage the endpoint. +Users will need a Globus account, which can be obtained via their institution or directly from Globus (at no cost). -The setup required to enable Globus is described in the `Community Dataverse-Globus Setup and Configuration document `_ and the references therein. +For the reference use case, Dataverse must be configured with a list of allowed endpoint/base paths from which files may be referenced. In this case, since Dataverse is not accessing the remote endpoint itself, it does not need Globus credentials. +Users will need a Globus account in this case, and the remote endpoint must be configured to allow them access (i.e. 
be publicly readable, or potentially involve some out-of-band mechanism to request access that could be described in the dataset's Terms of Use and Access).
+
+All of Dataverse's Globus capabilities are now store-based (see the store documentation) and therefore different collections/datasets can be configured to use different Globus-capable stores (or normal file, S3 stores, etc.).
+
+More details of the setup required to enable Globus are described in the `Community Dataverse-Globus Setup and Configuration document `_ and the references therein.
 
 As described in that document, Globus transfers can be initiated by choosing the Globus option in the dataset upload panel. (Globus, which does asynchronous transfers, is not available during dataset creation.)
 
 Analogously, "Globus Transfer" is one of the download options in the "Access Dataset" menu and optionally the file landing page download menu (if/when supported in the dataverse-globus app).

From 6ad55eb689071921857a9f97135e97dd2e71c076 Mon Sep 17 00:00:00 2001
From: qqmyers 
Date: Fri, 10 Nov 2023 17:50:16 -0500
Subject: [PATCH 0215/1112] Support multiple ref endpoints for non-managed case

---
 .../harvard/iq/dataverse/api/Datasets.java    |  72 ++++----
 .../dataaccess/GlobusAccessibleStore.java     |  14 +-
 .../dataaccess/GlobusOverlayAccessIO.java     | 166 +++++++++++++-----
 .../dataaccess/RemoteOverlayAccessIO.java     |  47 +++--
 .../dataverse/globus/GlobusServiceBean.java   |  31 +++-
 5 files changed, 226 insertions(+), 104 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index b1c528f3fd9..a57f373f106 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3601,13 +3601,11 @@ public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @Pat
         }
 
         JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
-        if (managed) {
-
-            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusTransferPaths")
+        String requestCallName = managed ?
"requestGlobusTransferPaths" : "requestGlobusReferencePaths"; + allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName) .add(URLTokenUtil.HTTP_METHOD, "POST") - .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusTransferPaths") + .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusPaths") .add(URLTokenUtil.TIMEOUT, 300)); - } allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles") .add(URLTokenUtil.HTTP_METHOD, "POST") .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles") @@ -3632,7 +3630,7 @@ public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @Pat */ @POST @AuthRequired - @Path("{id}/requestGlobusTransferPaths") + @Path("{id}/requestGlobusPaths") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, String jsonBody @@ -3666,35 +3664,45 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP } catch (WrappedResponse wr) { return wr.getResponse(); } - - if(!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) { - return badRequest("This dataset does not have managed Globus storage"); - } - if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset) .canIssue(UpdateDatasetVersionCommand.class)) { - try { + JsonObject params = JsonUtil.getJsonObject(jsonBody); - String principal = params.getString("principal"); - int numberOfPaths = params.getInt("numberOfFiles"); - if(numberOfPaths <=0) { - return badRequest("numberOfFiles must be positive"); - } - - JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths); - switch (response.getInt("status")) { - case 201: - return ok(response.getJsonObject("paths")); - case 400: - return badRequest("Unable to grant permission"); - case 409: - return conflict("Permission already exists"); - default: - return error(null, "Unexpected error when granting permission"); - } - } catch (NullPointerException|ClassCastException e) { - return badRequest("Error retrieving principal and numberOfFiles from JSON request body"); - + if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) { + try { + JsonArray referencedFiles = params.getJsonArray("referencedFiles"); + if (referencedFiles == null || referencedFiles.size() == 0) { + return badRequest("No referencedFiles specified"); + } + JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles); + return (ok(fileMap)); + } catch (Exception e) { + return badRequest(e.getLocalizedMessage()); + } + } else { + try { + String principal = params.getString("principal"); + int numberOfPaths = params.getInt("numberOfFiles"); + if (numberOfPaths <= 0) { + return badRequest("numberOfFiles must be positive"); + } + + JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths); + switch (response.getInt("status")) { + case 201: + return ok(response.getJsonObject("paths")); + case 400: + return badRequest("Unable to grant permission"); + case 409: + return conflict("Permission already exists"); + default: + return error(null, "Unexpected error when granting permission"); + } + + } catch (NullPointerException | ClassCastException e) { + return badRequest("Error retrieving principal and numberOfFiles from JSON request body"); + + } } } else { return 
forbidden("User doesn't have permission to upload to this dataset"); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java index 1d98044b2b5..afc7556481a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java @@ -7,8 +7,7 @@ public interface GlobusAccessibleStore { static final String MANAGED = "managed"; - static final String GLOBUS_TRANSFER_ENDPOINT_WITH_BASEPATH = "globus-transfer-endpoint-with-basepath"; - static final String GLOBUS_REFERENCE_ENDPOINTS_WITH_BASEPATHS = "globus-reference-endpoints-with-basepaths"; + static final String TRANSFER_ENDPOINT_WITH_BASEPATH = "transfer-endpoint-with-basepath"; static final String GLOBUS_TOKEN = "globus-token"; public static boolean isDataverseManaged(String driverId) { @@ -16,37 +15,36 @@ public static boolean isDataverseManaged(String driverId) { } public static String getTransferEndpointId(String driverId) { - String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, GLOBUS_TRANSFER_ENDPOINT_WITH_BASEPATH); + String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, TRANSFER_ENDPOINT_WITH_BASEPATH); int pathStart = endpointWithBasePath.indexOf("/"); return pathStart > 0 ? endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath; } public static String getTransferPath(String driverId) { - String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, GLOBUS_TRANSFER_ENDPOINT_WITH_BASEPATH); + String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, TRANSFER_ENDPOINT_WITH_BASEPATH); int pathStart = endpointWithBasePath.indexOf("/"); return pathStart > 0 ? endpointWithBasePath.substring(pathStart) : ""; } public static JsonArray getReferenceEndpointsWithPaths(String driverId) { - String[] endpoints = StorageIO.getConfigParamForDriver(driverId, GLOBUS_REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*"); + String[] endpoints = StorageIO.getConfigParamForDriver(driverId, RemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*"); JsonArrayBuilder builder = Json.createArrayBuilder(); for(int i=0;i/// * - * baseUrl: globus:// + * transfer and reference endpoint formats: + * reference endpoints separated by a comma * */ public class GlobusOverlayAccessIO extends RemoteOverlayAccessIO implements GlobusAccessibleStore { @@ -50,7 +53,7 @@ public class GlobusOverlayAccessIO extends RemoteOverlayAcce * Dataverse/the globus app manage file locations, access controls, deletion, * etc. 
*/ - private boolean dataverseManaged = false; + private Boolean dataverseManaged = null; private String relativeDirectoryPath; @@ -58,22 +61,59 @@ public class GlobusOverlayAccessIO extends RemoteOverlayAcce private String filename; + private String[] allowedEndpoints; private String endpoint; public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); - dataverseManaged = GlobusAccessibleStore.isDataverseManaged(this.driverId); } + + public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { + this.driverId = driverId; + configureStores(null, driverId, storageLocation); + if (isManaged()) { + String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation); + path = parts[1]; + } else { + this.setIsLocalFile(false); + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Referenced path: " + path); + } + } + private boolean isManaged() { + if(dataverseManaged==null) { + dataverseManaged = GlobusAccessibleStore.isDataverseManaged(this.driverId); + } + return dataverseManaged; + } + + private String retrieveGlobusAccessToken() { + String globusToken = getConfigParam(GlobusAccessibleStore.GLOBUS_TOKEN); + + + AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken); + return accessToken.getOtherTokens().get(0).getAccessToken(); + } + + private void parsePath() { int filenameStart = path.lastIndexOf("/") + 1; - String endpointWithBasePath = baseUrl.substring(baseUrl.lastIndexOf(DataAccess.SEPARATOR) + 3); + String endpointWithBasePath = null; + if (!isManaged()) { + endpointWithBasePath = findMatchingEndpoint(path, allowedEndpoints); + } else { + endpointWithBasePath = allowedEndpoints[0]; + } + //String endpointWithBasePath = baseEndpointPath.substring(baseEndpointPath.lastIndexOf(DataAccess.SEPARATOR) + 3); int pathStart = endpointWithBasePath.indexOf("/"); logger.info("endpointWithBasePath: " + endpointWithBasePath); endpointPath = "/" + (pathStart > 0 ? 
endpointWithBasePath.substring(pathStart + 1) : ""); logger.info("endpointPath: " + endpointPath); + - if (dataverseManaged && (dvObject!=null)) { + if (isManaged() && (dvObject!=null)) { Dataset ds = null; if (dvObject instanceof Dataset) { @@ -95,40 +135,36 @@ private void parsePath() { } - public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { - this.driverId = driverId; - configureStores(null, driverId, storageLocation); - this.dataverseManaged = GlobusAccessibleStore.isDataverseManaged(this.driverId); - if (dataverseManaged) { - String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation); - path = parts[1]; - } else { - this.setIsLocalFile(false); - path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); - validatePath(path); - logger.fine("Relative path: " + path); + private static String findMatchingEndpoint(String path, String[] allowedEndpoints) { + for(int i=0;i 0) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index aafab038ae2..5463254140d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -65,7 +65,10 @@ public class RemoteOverlayAccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); + // A single baseUrl of the form http(s):// where this store can reference data static final String BASE_URL = "base-url"; + // Multiple endpoints where data can be referenced from. Multiple endpoints are separated by a comma. Multiple endpoints are only supported by the GlobalOverlayAccessIO at present. + static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths"; static final String BASE_STORE = "base-store"; static final String SECRET_KEY = "secret-key"; static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes"; @@ -74,7 +77,7 @@ public class RemoteOverlayAccessIO extends StorageIO { protected StorageIO baseStore = null; protected String path = null; - protected String baseUrl = null; + private String baseUrl = null; protected static HttpClientContext localContext = HttpClientContext.create(); protected PoolingHttpClientConnectionManager cm = null; @@ -110,7 +113,7 @@ public RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOE logger.fine("Relative path: " + path); } - private void validatePath(String relPath) throws IOException { + protected void validatePath(String relPath) throws IOException { try { URI absoluteURI = new URI(baseUrl + "/" + relPath); if (!absoluteURI.normalize().toString().startsWith(baseUrl)) { @@ -457,19 +460,8 @@ int getUrlExpirationMinutes() { } protected void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { - baseUrl = getConfigParam(BASE_URL); - if (baseUrl == null) { - throw new IOException("dataverse.files." 
+ this.driverId + ".base-url is required"); - } else { - try { - new URI(baseUrl); - } catch (Exception e) { - logger.warning( - "Trouble interpreting base-url for store: " + this.driverId + " : " + e.getLocalizedMessage()); - throw new IOException("Can't interpret base-url as a URI"); - } - - } + configureEndpoints(); + if (baseStore == null) { String baseDriverId = getBaseStoreIdFor(driverId); @@ -543,6 +535,31 @@ protected void configureStores(DataAccessRequest req, String driverId, String st } } + /** This endpoint configures all the endpoints the store is allowed to reference data from. At present, the RemoteOverlayAccessIO only supports a single endpoint but + * the derived GlobusOverlayAccessIO can support multiple endpoints. + * @throws IOException + */ + protected void configureEndpoints() throws IOException { + baseUrl = getConfigParam(BASE_URL); + if (baseUrl == null) { + //Will accept the first endpoint using the newer setting + baseUrl = getConfigParam(REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*")[0]; + if (baseUrl == null) { + throw new IOException("dataverse.files." + this.driverId + ".base-url is required"); + } + } + if (baseUrl != null) { + try { + new URI(baseUrl); + } catch (Exception e) { + logger.warning( + "Trouble interpreting base-url for store: " + this.driverId + " : " + e.getLocalizedMessage()); + throw new IOException("Can't interpret base-url as a URI"); + } + + } + } + // Convenience method to assemble the path, starting with the DOI // authority/identifier/, that is needed to create a base store via // DataAccess.getDirectStorageIO - the caller has to add the store type specific diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index dab0e36852c..3dee3bd498f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -6,7 +6,6 @@ import com.google.gson.FieldNamingPolicy; import com.google.gson.GsonBuilder; import edu.harvard.iq.dataverse.*; - import jakarta.ejb.Asynchronous; import jakarta.ejb.EJB; import jakarta.ejb.Stateless; @@ -20,6 +19,8 @@ import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonPatch; +import jakarta.json.JsonString; +import jakarta.json.JsonValue.ValueType; import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.HttpMethod; @@ -57,7 +58,6 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; -import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -284,6 +284,33 @@ public JsonObject requestAccessiblePaths(String principal, Dataset dataset, int return response.build(); } + public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray referencedFiles) { + String driverId = dataset.getEffectiveStorageDriverId(); + JsonArray endpoints = GlobusAccessibleStore.getReferenceEndpointsWithPaths(driverId); + + JsonObjectBuilder fileMap = Json.createObjectBuilder(); + referencedFiles.forEach(value -> { + if (value.getValueType() != ValueType.STRING) { + throw new JsonParsingException("ReferencedFiles must be strings", 
null); + } + String referencedFile = ((JsonString) value).getString(); + boolean valid = false; + for (int i = 0; i < endpoints.size(); i++) { + if (referencedFile.startsWith(((JsonString) endpoints.get(i)).getString())) { + valid = true; + } + } + if (!valid) { + throw new IllegalArgumentException( + "Referenced file " + referencedFile + " is not in an allowed endpoint/path"); + } + String storageIdentifier = DataAccess.getNewStorageIdentifier(driverId); + fileMap.add(referencedFile, + storageIdentifier + "//" + referencedFile); + }); + return fileMap.build(); + } + //Single cache of open rules/permission requests private final Cache rulesCache = Caffeine.newBuilder() .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) From 48f02dde7f22b21e28c8d635df904b79532f042a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 10 Nov 2023 17:56:56 -0500 Subject: [PATCH 0216/1112] handle file not found case --- .../iq/dataverse/dataaccess/GlobusOverlayAccessIO.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 0dec7133fb5..f42f5443108 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -25,6 +25,7 @@ import org.apache.http.util.EntityUtils; import jakarta.json.Json; +import jakarta.json.JsonArray; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; @@ -189,7 +190,11 @@ public long retrieveSizeFromMedia() { String responseString = EntityUtils.toString(response.getEntity()); logger.info("Response from " + get.getURI().toString() + " is: " + responseString); JsonObject responseJson = JsonUtil.getJsonObject(responseString); - return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getInt("size"); + JsonArray dataArray = responseJson.getJsonArray("DATA"); + if (dataArray != null && dataArray.size() != 0) { + //File found + return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getInt("size"); + } } else { logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); From c33f07aad938f4707e6985ddeeec801969e4a3fc Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Sat, 11 Nov 2023 14:38:00 -0500 Subject: [PATCH 0217/1112] Add logic to leave settings as found before test --- .../edu/harvard/iq/dataverse/api/ProvIT.java | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 3bfa3d72fbd..6b9b59f431d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -30,7 +30,12 @@ public static void setUpClass() { @Test public void testFreeformDraftActions() { - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + boolean provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } + Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -85,15 +90,20 @@ public 
void testFreeformDraftActions() { datasetVersions.prettyPrint(); datasetVersions.then().assertThat() .body("data[0].versionState", equalTo("DRAFT")); - - UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - + if(!provEnabled){ + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } + } @Test public void testAddProvFile() { - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + boolean provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); @@ -213,12 +223,8 @@ public void testAddProvFile() { deleteProvJson.then().assertThat() .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset - UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); -// Command removed, redundant -// Response deleteProvFreeForm = UtilIT.deleteProvFreeForm(dataFileId.toString(), apiTokenForDepositor); -// deleteProvFreeForm.prettyPrint(); -// deleteProvFreeForm.then().assertThat() -// .statusCode(OK.getStatusCode()); - + if(!provEnabled){ + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } } } From 6beafcef4855c2a35cfe6d61408a5625a285885e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Sat, 11 Nov 2023 22:09:22 -0500 Subject: [PATCH 0218/1112] Change format to MD of the QA guide --- doc/sphinx-guides/source/index.rst | 2 +- doc/sphinx-guides/source/qa/conclusion.md | 11 -------- doc/sphinx-guides/source/qa/index.md | 10 +++++++ doc/sphinx-guides/source/qa/index.rst | 14 ---------- .../{manual-testing.rst => manual-testing.md} | 27 +++++++++---------- ...her-approaches.rst => other-approaches.md} | 24 ++++++++--------- .../source/qa/{overview.rst => overview.md} | 23 ++++++++-------- ...ormance-tests.rst => performance-tests.md} | 21 ++++++++------- ...ion.rst => test-automation-integration.md} | 24 ++++++++--------- ...tructure.rst => testing-infrastructure.md} | 15 +++++------ 10 files changed, 77 insertions(+), 94 deletions(-) delete mode 100644 doc/sphinx-guides/source/qa/conclusion.md create mode 100644 doc/sphinx-guides/source/qa/index.md delete mode 100755 doc/sphinx-guides/source/qa/index.rst rename doc/sphinx-guides/source/qa/{manual-testing.rst => manual-testing.md} (92%) rename doc/sphinx-guides/source/qa/{other-approaches.rst => other-approaches.md} (95%) rename doc/sphinx-guides/source/qa/{overview.rst => overview.md} (95%) rename doc/sphinx-guides/source/qa/{performance-tests.rst => performance-tests.md} (91%) rename doc/sphinx-guides/source/qa/{test-automation-integration.rst => test-automation-integration.md} (78%) rename doc/sphinx-guides/source/qa/{testing-infrastructure.rst => testing-infrastructure.md} (82%) diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index 9d3d49ef4f2..3184160b387 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -20,7 +20,7 @@ These documentation guides are for the |version| version of Dataverse. 
To find g developers/index container/index style/index - qa/index + qa/index.md How the Guides Are Organized ---------------------------- diff --git a/doc/sphinx-guides/source/qa/conclusion.md b/doc/sphinx-guides/source/qa/conclusion.md deleted file mode 100644 index 233dc3cdf3d..00000000000 --- a/doc/sphinx-guides/source/qa/conclusion.md +++ /dev/null @@ -1,11 +0,0 @@ -Conclusion -========== - -QA is awesome. Do you know what else is awesome? Markdown. - -It's easy to create a [link](https://dataverse.org), for example, and nested bullets don't need extra indentation: - -- foo - - one - - two -- bar diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md new file mode 100644 index 00000000000..c190d823bef --- /dev/null +++ b/doc/sphinx-guides/source/qa/index.md @@ -0,0 +1,10 @@ +# QA Guide + +```{toctree} +overview.md +testing-infrastructure.md +performance-tests.md +manual-testing.md +test-automation-integration.md +other-approaches.md +``` \ No newline at end of file diff --git a/doc/sphinx-guides/source/qa/index.rst b/doc/sphinx-guides/source/qa/index.rst deleted file mode 100755 index dd8c046fddc..00000000000 --- a/doc/sphinx-guides/source/qa/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -QA Guide -======== - -**Contents:** - -.. toctree:: - - overview - testing-infrastructure - performance-tests - manual-testing - test-automation-integration - other-approaches - conclusion diff --git a/doc/sphinx-guides/source/qa/manual-testing.rst b/doc/sphinx-guides/source/qa/manual-testing.md similarity index 92% rename from doc/sphinx-guides/source/qa/manual-testing.rst rename to doc/sphinx-guides/source/qa/manual-testing.md index 8e50e6b6b08..bf6f16f7911 100644 --- a/doc/sphinx-guides/source/qa/manual-testing.rst +++ b/doc/sphinx-guides/source/qa/manual-testing.md @@ -1,23 +1,22 @@ -Manual Testing Approach -======================= +# Manual Testing Approach -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` +## Introduction -Introduction ------------- We use a risk-based, manual testing approach to achieve the most benefit with limited resources. This means we want to catch bugs where they are likely to exist, ensure core functions work, and failures do not have catastrophic results. In practice this means we do a brief positive check of core functions on each build called a smoke test, we test the most likely place for new bugs to exist, the area where things have changed, and attempt to prevent catastrophic failure by asking about the scope and reach of the code and how failures may occur. If it seems possible through user error or some other occurrence that such a serious failure will occur, we try to make it happen in the test environment. If the code has a UI component, we also do a limited amount of browser compatibility testing using Chrome, Firefox, and Safari browsers. We do not currently do UX or accessibility testing on a regular basis, though both have been done product-wide by the Design group and by the community. -Examining a Pull Pequest for Test Cases: ----------------------------------------- -What Problem Does it Solve? -++++++++++++++++++++++++++++++++++++++++++++ +## Examining a Pull Pequest for Test Cases: + +### What Problem Does it Solve? + Read the top part of the pull request for a description, notes for reviewers, and usually a how-to test section. Does it make sense? If not, read the underlying ticket it closes, and any release notes or documentation. 
Knowing in general what it does helps you to think about how to approach it. -How is it Configured? -+++++++++++++++++++++ +### How is it Configured? + Most pull requests do not have any special configuration and are enabled on deployment, but some do. Configuration is part of testing. An admin will need to follow these instructions so try them out. Plus, that is the only way you will get it working to test it! Identify test cases by examining the problem report or feature description and any documentation of functionality. Look for statements or assertions about functions, what it does, as well as conditions or conditional behavior. These become your test cases. Think about how someone might make a mistake using it and try it. Does it fail gracefully or in a confusing or worse, damaging manner? Also, consider whether this pull request may interact with other functionality and try some spot checks there. For instance, if new metadata fields are added, try the export feature. Of course, try the suggestions under how to test. Those may be sufficient, but you should always think about it based on what it does. @@ -32,8 +31,8 @@ Check permissions. Is this feature limited to a specific set of users? Can it be Think about risk. Is the feature or function part of a critical area such as permissions? Does the functionality modify data? You may do more testing when the risk is higher. -Smoke Test ------------ +## Smoke Test + 1. Go to the homepage on https://dataverse-internal.iq.harvard.edu. Scroll to the bottom to ensure the build number is the one you intend to test from Jenkins. 2. Create a new user: I use a formulaic name with my initials and date and make the username and password the same, eg. kc080622. diff --git a/doc/sphinx-guides/source/qa/other-approaches.rst b/doc/sphinx-guides/source/qa/other-approaches.md similarity index 95% rename from doc/sphinx-guides/source/qa/other-approaches.rst rename to doc/sphinx-guides/source/qa/other-approaches.md index bd92e7d22d8..b50d9d0cf11 100644 --- a/doc/sphinx-guides/source/qa/other-approaches.rst +++ b/doc/sphinx-guides/source/qa/other-approaches.md @@ -1,13 +1,13 @@ -Other approaches to deploying and testing -========================================= +# Other approaches to deploying and testing -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. I’m assuming if a developer is testing, they would likely just deploy to their dev environment. That might be ok but not sure the env is fully configured enough to offer a real-world testing scenario. An alternative might be to spin an EC2 branch on AWS, potentially using sample data. This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. -Tips and tricks ---------------- +## Tips and tricks + - Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. 
Save the doc in a testing folder (I have one on Google Drive). This potentially will help with future testing. - When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. @@ -17,8 +17,8 @@ Tips and tricks - When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. - Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. -Workflow for Completing QA on a PR ------------------------------------ +## Workflow for Completing QA on a PR + 1. Assign the PR you are working on to yourself. @@ -106,8 +106,8 @@ Workflow for Completing QA on a PR Just a housekeeping move if the PR is from IQSS. Click the delete branch button where the merge button had been. There is no deletion for outside contributions. -Checklist for Completing QA on a PR ------------------------------------- +## Checklist for Completing QA on a PR + 1. Build the docs 2. Smoke test the pr @@ -115,8 +115,8 @@ Checklist for Completing QA on a PR 4. Regression test 5. Test any upgrade instructions -Checklist for QA on Release ---------------------------- +## Checklist for QA on Release + 1. Review Consolidated Release Notes, in particular upgrade instructions. 2. Conduct performance testing and compare with the previous release. diff --git a/doc/sphinx-guides/source/qa/overview.rst b/doc/sphinx-guides/source/qa/overview.md similarity index 95% rename from doc/sphinx-guides/source/qa/overview.rst rename to doc/sphinx-guides/source/qa/overview.md index 153fab1a28f..51b38ee0921 100644 --- a/doc/sphinx-guides/source/qa/overview.rst +++ b/doc/sphinx-guides/source/qa/overview.md @@ -1,26 +1,25 @@ -Overview -======== +# Overview -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` +## Introduction -Introduction ------------- This document describes the testing process used by QA at IQSS and provides a guide for others filling in for that role. Please note that many variations are possible, and the main thing is to catch bugs and provide a good quality product to the user community. -Workflow --------- +## Workflow + The basic workflow is bugs or feature requests are submitted to GitHub by the community or by team members as issues. These issues are prioritized and added to a two-week sprint that is reflected on the GitHub Kanban board. As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common develop branch and ultimately released as part of the product. Before a pull request is merged it must be reviewed by a member of the development team from a coding perspective, it must pass automated integration tests before moving to QA. There it is tested manually, exercising the UI using three common browser types and any business logic it implements. Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. 
Any documentation provided is used to understand the feature and any assertions are tested. Once this passes and any bugs that are found are corrected, the automated integration tests are confirmed to be passing, the PR is merged into development, the PR is closed, and the branch is deleted. At this point, the pr moves from the QA column automatically into the Done column and the process repeats with the next pr until it is decided to make a release. -Release Cadence and Sprints ---------------------------- +## Release Cadence and Sprints + A release likely spans multiple two-week sprints. Each sprint represents the priorities for that time and is sized so that the team can reasonably complete most of the work on time. This is a goal to help with planning, it is not a strict requirement. Some issues from the previous sprint may remain and likely be included in the next sprint but occasionally may be deprioritized and deferred to another time. The decision to make a release can be based on the time since the last release, some important feature needed by the community or contractual deadline, or some other logical reason to package the work completed into a named release and posted to the releases section on GitHub. -Performance Testing and Deployment ----------------------------------- +## Performance Testing and Deployment + The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named Locust, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds (I believe), it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. Once the performance has been tested and recorded in a Google spreadsheet for this proposed version, the release will be prepared and posted. diff --git a/doc/sphinx-guides/source/qa/performance-tests.rst b/doc/sphinx-guides/source/qa/performance-tests.md similarity index 91% rename from doc/sphinx-guides/source/qa/performance-tests.rst rename to doc/sphinx-guides/source/qa/performance-tests.md index 1bfde798100..7075d7f1776 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.rst +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -1,21 +1,22 @@ -Performance Testing -=================== +# Performance Testing -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` + +## Introduction -Introduction ------------- To run performance tests, we have a performance test cluster on AWS that employs web, database, and Solr. The database contains a copy of production that is updated weekly on Sundays. 
To ensure the homepage content is consistent between test runs across releases, two scripts set the datasets that will appear on the homepage. There is a script on the web server in the default CentOS user dir and one on the database server in the default CentOS user dir. Run these scripts before conducting the tests. -Access ------- +## Access + Access to performance cluster instances requires ssh keys, see Leonid. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. Access to the demo requires an ssh key, see Leonid. -Special Notes ⚠️ ------------------ +## Special Notes ⚠️ + Please note the performance database is also used occasionally by Julian and the Curation team to generate prod reports so a courtesy check with Julian would be good before taking over the env. + Executing the Performance Script -------------------------------- To execute the performance test script, you need to install a local copy of the database-helper-scripts project (https://github.com/IQSS/dataverse-helper-scripts), written by Raman. I have since produced a stripped-down script that calls just the DB and ds and works with python3. diff --git a/doc/sphinx-guides/source/qa/test-automation-integration.rst b/doc/sphinx-guides/source/qa/test-automation-integration.md similarity index 78% rename from doc/sphinx-guides/source/qa/test-automation-integration.rst rename to doc/sphinx-guides/source/qa/test-automation-integration.md index 13c48105f91..5e9d00cd461 100644 --- a/doc/sphinx-guides/source/qa/test-automation-integration.rst +++ b/doc/sphinx-guides/source/qa/test-automation-integration.md @@ -1,15 +1,15 @@ -Test automation and integration test -==================================== +# Test automation and integration test -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` This test suite is added to and maintained by development. It is generally advisable for code contributors to add integration tests when adding new functionality. The approach here is one of code coverage: exercise as much of the code base’s code paths as possible, every time to catch bugs. This type of approach is often used to give contributing developers confidence that their code didn’t introduce any obvious, major issues and is run on each commit. Since it is a broad set of tests, it is not clear whether any specific, conceivable test is run but it does add a lot of confidence that the code base is functioning due to its reach and consistency. -Building and Deploying a Pull Request from Jenkins to Dataverse-Internal: -------------------------------------------------------------------------- +## Building and Deploying a Pull Request from Jenkins to Dataverse-Internal: + 1. Log on to GitHub, go to projects, dataverse to see Kanban board, select a pull request to test from the QA queue. @@ -17,12 +17,12 @@ Building and Deploying a Pull Request from Jenkins to Dataverse-Internal: 3. Log on to jenkins.dataverse.org, select the IQSS_Dataverse_Internal project, and configure the repository URL and branch specifier to match the ones from the pull request. For example: - - 8372-gdcc-xoai-library has IQSS implied - | **Repository URL:** https://github.com/IQSS/dataverse.git - | **Branch specifier:** \*/8372-gdcc-xoai-library - - GlobalDataverseCommunityConsortium:GDCC/DC-3B - | **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git - | **Branch specifier:** \*/GDCC/DC-3B. 
+ * 8372-gdcc-xoai-library has IQSS implied + - **Repository URL:** https://github.com/IQSS/dataverse.git + - **Branch specifier:** */8372-gdcc-xoai-library + * GlobalDataverseCommunityConsortium:GDCC/DC-3B + - **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git + - **Branch specifier:** */GDCC/DC-3B. 4. Click Build Now and note the build number in progress. diff --git a/doc/sphinx-guides/source/qa/testing-infrastructure.rst b/doc/sphinx-guides/source/qa/testing-infrastructure.md similarity index 82% rename from doc/sphinx-guides/source/qa/testing-infrastructure.rst rename to doc/sphinx-guides/source/qa/testing-infrastructure.md index d35bc6e9a23..fb66bc4d099 100644 --- a/doc/sphinx-guides/source/qa/testing-infrastructure.rst +++ b/doc/sphinx-guides/source/qa/testing-infrastructure.md @@ -1,16 +1,15 @@ -Infrastructure for Testing -========================== +# Infrastructure for Testing -.. contents:: |toctitle| - :local: +```{contents} +:depth: 3 +``` +## Dataverse Internal -Dataverse Internal -------------------- To build and test a PR, we use a build named IQSS_Dataverse_Internal on jenkins.dataverse.org, which deploys the .war file to an AWS instance named dataverse-internal.iq.harvard.edu. Login to Jenkins requires a username and password. Check with Don Sizemore. Login to the dataverse-internal server requires a key, see Leonid. -Guides Server -------------- +## Guides Server + There is also a guides build project named guides.dataverse.org. Any test builds of guides are deployed to a named directory** on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. Login to the guides server requires a key, see Don Sizemore. 
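The performance-testing section of the QA guide converted above describes driving load with Locust against the homepage and a handful of dataset pages at increasing user counts (1 up to 250), with 5-20 seconds of think time between requests. As a rough sketch of that kind of run (not the actual dataverse-helper-scripts tooling), a headless Locust invocation could be looped over user counts as shown below; the locustfile name, intermediate user counts, spawn rate, run time, and target host are illustrative assumptions:

```bash
# Sketch only: assumes a locustfile.py whose tasks request "/" and a few
# dataset pages (dataset.xhtml?persistentId=...) with 5-20 second wait times.
PERF_HOST="https://perf-cluster.example.edu"   # stand-in for the AWS performance cluster

for users in 1 10 50 100 250; do
  locust -f locustfile.py --headless \
         --users "$users" --spawn-rate 5 --run-time 10m \
         --host "$PERF_HOST" \
         --csv "perf_${users}_users"
done
```

The per-run CSV output makes it straightforward to compare response times for a proposed release against the numbers recorded for the previous one.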
From 3407fb9f813984c857ef7708af7d6dc239b8f8ee Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 07:04:15 -0500 Subject: [PATCH 0219/1112] Add ProvIT to integration-tests.txt --- tests/integration-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt index 18911b3164a..bb3bc7f9ce6 100644 --- a/tests/integration-tests.txt +++ b/tests/integration-tests.txt @@ -1 +1 @@ -DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT +DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,ProvIT From 2842cdaf246c531b04449ac4c8b20fc4a09c2668 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 13 Nov 2023 08:42:31 -0500 Subject: [PATCH 0220/1112] Move this change into BeforeAll/AfterAll --- .../edu/harvard/iq/dataverse/api/ProvIT.java | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 6b9b59f431d..69a87869fe1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -11,6 +11,9 @@ import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterAll; + import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; @@ -20,22 +23,24 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; public class ProvIT { + + private static boolean provEnabled = false; @BeforeAll - public static void setUpClass() { + public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + + provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } } @Test public void testFreeformDraftActions() { - Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - boolean provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } - Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -90,20 +95,11 @@ public void testFreeformDraftActions() { datasetVersions.prettyPrint(); datasetVersions.then().assertThat() .body("data[0].versionState", equalTo("DRAFT")); - if(!provEnabled){ - 
UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } - + } @Test - public void testAddProvFile() { - - Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - boolean provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } + public void testAddProvFile() { Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); @@ -223,6 +219,11 @@ public void testAddProvFile() { deleteProvJson.then().assertThat() .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset + + } + + @AfterAll + public static void tearDownClass() { if(!provEnabled){ UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); } From 437e7ccd480dbae405238faffb9fff8a8317218d Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 13 Nov 2023 09:56:16 -0500 Subject: [PATCH 0221/1112] #9464 remove unused import --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index fabb33e328a..557b7df202b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -128,7 +128,6 @@ import java.util.Optional; import java.util.stream.Collectors; import jakarta.servlet.http.HttpServletResponse; -import jakarta.validation.constraints.NotNull; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.StreamingOutput; From d029cacc9aae5e361869b73f7e76661c5ab8d549 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 11:35:28 -0500 Subject: [PATCH 0222/1112] remove extra whitespace #10112 --- src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 69a87869fe1..a944c6aa926 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -27,12 +27,12 @@ public class ProvIT { private static boolean provEnabled = false; @BeforeAll - public static void setUpClass() { + public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - + provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ + if (!provEnabled) { UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); } } @@ -99,7 +99,7 @@ public void testFreeformDraftActions() { } @Test - public void testAddProvFile() { + public void testAddProvFile() { Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); From c09034d638147c5cd618e5ff4a460e1840b8cd0a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 11:37:16 -0500 Subject: [PATCH 0223/1112] organize imports #10112 --- .../java/edu/harvard/iq/dataverse/api/ProvIT.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index a944c6aa926..33323ff4239 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java 
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -1,27 +1,23 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.OK; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import org.junit.jupiter.api.AfterAll; - +import static jakarta.ws.rs.core.Response.Status.OK; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; - +import org.junit.jupiter.api.AfterAll; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; - public class ProvIT { private static boolean provEnabled = false; From a3d323599be4bcc6ad688a8b99135bd4447fbb02 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 16:07:53 -0500 Subject: [PATCH 0224/1112] various improvements to the QA Guide #10101 --- doc/sphinx-guides/source/developers/intro.rst | 2 + .../source/developers/testing.rst | 4 + .../source/developers/version-control.rst | 2 + doc/sphinx-guides/source/qa/index.md | 4 +- doc/sphinx-guides/source/qa/manual-testing.md | 31 +++---- .../source/qa/other-approaches.md | 91 +++++++++---------- doc/sphinx-guides/source/qa/overview.md | 15 ++- .../source/qa/performance-tests.md | 6 +- .../source/qa/test-automation-integration.md | 35 ------- .../source/qa/test-automation.md | 35 +++++++ .../source/qa/testing-infrastructure.md | 12 ++- 11 files changed, 119 insertions(+), 118 deletions(-) delete mode 100644 doc/sphinx-guides/source/qa/test-automation-integration.md create mode 100644 doc/sphinx-guides/source/qa/test-automation.md diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index a01a8066897..3eddfbe8d2d 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -37,6 +37,8 @@ Roadmap For the Dataverse Software development roadmap, please see https://www.iq.harvard.edu/roadmap-dataverse-project +.. _kanban-board: + Kanban Board ------------ diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index abecaa09fad..57733f25406 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -426,6 +426,10 @@ target/coverage-it/index.html is the place to start reading the code coverage re Load/Performance Testing ------------------------ +See also :doc:`/qa/performance-tests` in the QA Guide. + +.. _locust: + Locust ~~~~~~ diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index 31fc0a4e602..f46411ebd7f 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -34,6 +34,8 @@ The "master" Branch The "`master `_" branch represents released versions of the Dataverse Software. 
As mentioned in the :doc:`making-releases` section, at release time we update the master branch to include all the code for that release. Commits are never made directly to master. Rather, master is updated only when we merge code into it from the "develop" branch. +.. _develop-branch: + The "develop" Branch ******************** diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md index c190d823bef..08deb7ee27d 100644 --- a/doc/sphinx-guides/source/qa/index.md +++ b/doc/sphinx-guides/source/qa/index.md @@ -5,6 +5,6 @@ overview.md testing-infrastructure.md performance-tests.md manual-testing.md -test-automation-integration.md +test-automation.md other-approaches.md -``` \ No newline at end of file +``` diff --git a/doc/sphinx-guides/source/qa/manual-testing.md b/doc/sphinx-guides/source/qa/manual-testing.md index bf6f16f7911..9f365aae59f 100644 --- a/doc/sphinx-guides/source/qa/manual-testing.md +++ b/doc/sphinx-guides/source/qa/manual-testing.md @@ -9,23 +9,23 @@ We use a risk-based, manual testing approach to achieve the most benefit with li If it seems possible through user error or some other occurrence that such a serious failure will occur, we try to make it happen in the test environment. If the code has a UI component, we also do a limited amount of browser compatibility testing using Chrome, Firefox, and Safari browsers. We do not currently do UX or accessibility testing on a regular basis, though both have been done product-wide by the Design group and by the community. -## Examining a Pull Pequest for Test Cases: +## Examining a Pull Request for Test Cases -### What Problem Does it Solve? +### What Problem Does It Solve? -Read the top part of the pull request for a description, notes for reviewers, and usually a how-to test section. Does it make sense? If not, read the underlying ticket it closes, and any release notes or documentation. Knowing in general what it does helps you to think about how to approach it. +Read the top part of the pull request for a description, notes for reviewers, and usually a "how to test" section. Does it make sense? If not, read the underlying issue it closes, and any release notes or documentation. Knowing in general what it does helps you to think about how to approach it. -### How is it Configured? +### How is It Configured? -Most pull requests do not have any special configuration and are enabled on deployment, but some do. Configuration is part of testing. An admin will need to follow these instructions so try them out. Plus, that is the only way you will get it working to test it! +Most pull requests do not have any special configuration and are enabled on deployment, but some do. Configuration is part of testing. A sysadmin or superuser will need to follow these instructions so try them out. Plus, that is the only way you will get it working to test it! -Identify test cases by examining the problem report or feature description and any documentation of functionality. Look for statements or assertions about functions, what it does, as well as conditions or conditional behavior. These become your test cases. Think about how someone might make a mistake using it and try it. Does it fail gracefully or in a confusing or worse, damaging manner? Also, consider whether this pull request may interact with other functionality and try some spot checks there. For instance, if new metadata fields are added, try the export feature. Of course, try the suggestions under how to test. 
Those may be sufficient, but you should always think about it based on what it does. +Identify test cases by examining the problem report or feature description and any documentation of functionality. Look for statements or assertions about functions, what it does, as well as conditions or conditional behavior. These become your test cases. Think about how someone might make a mistake using it and try it. Does it fail gracefully or in a confusing or worse, damaging manner? Also, consider whether this pull request may interact with other functionality and try some spot checks there. For instance, if new metadata fields are added, try the export feature. Of course, try the suggestions under "how to test." Those may be sufficient, but you should always think about the pull request based on what it does. Try adding, modifying, and deleting any objects involved. This is probably covered by using the feature but a good basic approach to keep in mind. -Make sure any server logging is appropriate. You should tail the server log while running your tests. Watch for unreported errors or stack traces especially chatty logging. If you do find a bug you will need to report the stack trace from the server.log +Make sure any server logging is appropriate. You should tail the server log while running your tests. Watch for unreported errors or stack traces especially chatty logging. If you do find a bug you will need to report the stack trace from the server.log. Err on the side of providing the developer too much of server.log rather than too little. -Exercise the UI if there is one. I tend to use Chrome for most of my basic testing as it’s used twice as much as the next most commonly used browser, according to our site’s Google Analytics. I first go through all the options in the UI. Then, if all works, I’ll spot-check using Firefox and Safari. +Exercise the UI if there is one. We tend to use Chrome for most of my basic testing as it's used twice as much as the next most commonly used browser, according to our site's Google Analytics. First go through all the options in the UI. Then, if all works, spot-check using Firefox and Safari. Check permissions. Is this feature limited to a specific set of users? Can it be accessed by a guest or by a non-privileged user? How about pasting a privileged page URL into a non-privileged user’s browser? @@ -33,11 +33,10 @@ Think about risk. Is the feature or function part of a critical area such as per ## Smoke Test - -1. Go to the homepage on https://dataverse-internal.iq.harvard.edu. Scroll to the bottom to ensure the build number is the one you intend to test from Jenkins. -2. Create a new user: I use a formulaic name with my initials and date and make the username and password the same, eg. kc080622. -3. Create a dataverse: I use the same username -4. Create a dataset: I use the same username; I fill in the required fields (I do not use a template). -5. Upload 3 different types of files: I use a tabular file, 50by1000.dta, an image file, and a text file. -6. Publish the dataset. -7. Download a file. +1. Go to the homepage on . Scroll to the bottom to ensure the build number is the one you intend to test from Jenkins. +1. Create a new user: It's fine to use a formulaic name with your initials and date and make the username and password the same, eg. kc080622. +1. Create a dataverse: You can use the same username. +1. Create a dataset: You can use the same username; fill in the required fields (do not use a template). +1. 
Upload 3 different types of files: You can use a tabular file, 50by1000.dta, an image file, and a text file. +1. Publish the dataset. +1. Download a file. diff --git a/doc/sphinx-guides/source/qa/other-approaches.md b/doc/sphinx-guides/source/qa/other-approaches.md index b50d9d0cf11..cf679c3f442 100644 --- a/doc/sphinx-guides/source/qa/other-approaches.md +++ b/doc/sphinx-guides/source/qa/other-approaches.md @@ -1,125 +1,120 @@ -# Other approaches to deploying and testing +# Other Approaches to Deploying and Testing ```{contents} :depth: 3 ``` -This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. I’m assuming if a developer is testing, they would likely just deploy to their dev environment. That might be ok but not sure the env is fully configured enough to offer a real-world testing scenario. An alternative might be to spin an EC2 branch on AWS, potentially using sample data. This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. +This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. If a developer is testing, they would likely just deploy to their dev environment. That might be ok, but is the env is fully configured enough to offer a real-world testing scenario? An alternative might be to spin an EC2 branch on AWS, potentially using sample data. This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. -## Tips and tricks +## Tips and Tricks - -- Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (I have one on Google Drive). This potentially will help with future testing. -- When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. -- Always tail the server.log file while testing. Open a terminal window to the test instance and tail -F server.log. This helps you get a real-time sense of what the server is doing when you act and makes it easier to identify any stack trace on failure. -- When overloaded, do the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. -- When testing a bug fix, try reproducing the bug on the demo before testing the fix, that way you know you are taking the correct steps to verify that the fix worked. 
-- When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. -- Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. +- Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (on Google Drive). This potentially will help with future testing. +- When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. +- Always tail the server.log file while testing. Open a terminal window to the test instance and `tail -F server.log`. This helps you get a real-time sense of what the server is doing when you act and makes it easier to identify any stack trace on failure. +- When overloaded, do the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. +- When testing a bug fix, try reproducing the bug on the demo before testing the fix, that way you know you are taking the correct steps to verify that the fix worked. +- When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. +- Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. ## Workflow for Completing QA on a PR +1. Assign the PR you are working on to yourself. -1. Assign the PR you are working on to yourself. - -2. What does it do? +1. What does it do? Read the description at the top of the PR, any release notes, documentation, and the original issue. -3. Does it address the issue it closes? +1. Does it address the issue it closes? The PR should address the issue entirely unless otherwise noted. -4. How do you test it? +1. How do you test it? - Look at the “how to test section” at the top of the pull request. Does it make sense? This likely won’t be the only testing you perform. You can develop further tests from the original issue or problem description, from the description of functionality, the documentation, configuration, and release notes. Also consider trying to reveal bugs by trying to break it: try bad or missing data, very large values or volume of data, exceed any place that may have a limit or boundary. + Look at the “how to test" section at the top of the pull request. Does it make sense? This likely won’t be the only testing you perform. You can develop further tests from the original issue or problem description, from the description of functionality, the documentation, configuration, and release notes. 
Also consider trying to reveal bugs by trying to break it: try bad or missing data, very large values or volume of data, exceed any place that may have a limit or boundary. -5. Does it have or need documentation? +1. Does it have or need documentation? - Small changes or fixes usually don’t have doc but new features or extensions of a feature or new configuration options should have documentation. + Small changes or fixes usually don’t have docs but new features or extensions of a feature or new configuration options should have documentation. -6. Does it have or need release notes? +1. Does it have or need release notes? Same as for doc, just a heads up to an admin for something of note or especially upgrade instructions as needed. -7. Does it use a DB, flyway script? +1. Does it use a DB, Flyway script? Good to know since it may collide with another existing one by version or it could be a one way transform of your DB so back up your test DB before. Also, happens during deployment so be on the lookout for any issues. -8. Validate the documentation. +1. Validate the documentation. Build the doc using Jenkins, does it build without errors? Read it through for sense. Use it for test cases and to understand the feature. -9. Build and deploy the pull request. +1. Build and deploy the pull request. Normally this is done using Jenkins and automatically deployed to the QA test machine. -10. Configure if required +1. Configure if required If needed to operate and everyone installing or upgrading will use this, configure now as all testing will use it. -11. Smoke test the branch. +1. Smoke test the branch. Standard, minimal test of core functionality. -12. Regression test-related or potentially affected features +1. Regression test-related or potentially affected features If config is optional and testing without config turned on, do some spot checks/ regression tests of related or potentially affected areas. -13. Configure if optional +1. Configure if optional What is the default, enabled or disabled? Is that clearly indicated? Test both. By config here we mean enabling the functionality versus choosing a particular config option. Some complex features have config options in addition to enabling. Those will also need to be tested. -14. Test all the new or changed functionality. +1. Test all the new or changed functionality. The heart of the PR, what is this PR adding or fixing? Is it all there and working? -15. Regression test related or potentially affected features. +1. Regression test related or potentially affected features. - Sometimes new stuff modifies and extends other functionality or functionality that is shared with other aspects of the system, e.g. Export, Import. Check the underlying functionality that was also modified but in a spot check or briefer manner. + Sometimes new stuff modifies and extends other functionality or functionality that is shared with other aspects of the system, e.g. export, import. Check the underlying functionality that was also modified but in a spot check or briefer manner. -16. Report any issues found within the PR +1. Report any issues found within the PR It can be easy to lose track of what you’ve found, steps to reproduce, and any errors or stack traces from the server log. Add these in a numbered list to a comment in the pr. Easier to check off when fixed and to work on. Add large amounts of text as in the server log as attached, meaningfully named files. -17. Retest all fixes, spot check feature functionality, smoke test +1. 
Retest all fixes, spot check feature functionality, smoke test Similar to your initial testing, it is only narrower. -18. Test Upgrade Instructions, if required +1. Test upgrade instructions, if required Some features build upon the existing architecture but require modifications, such as adding a new column to the DB or changing or adding data. It is crucial that this works properly for our 100+ installations. This testing should be performed at the least on the prior version with basic data objects (collection, dataset, files) and any other data that will be updated by this feature. Using the sample data from the prior version would be good or deploying to dataverse-internal and upgrading there would be a good test. Remember to back up your DB before doing a transformative upgrade so that you can repeat it later if you find a bug. -19. Make sure the integration tests in the PR have been completed and passed. - +1. Make sure the API tests in the PR have been completed and passed. + They are run with each commit to the PR and take approximately 42 minutes to run. -20. Merge PR +1. Merge PR Click merge to include this PR into the common develop branch. -21. Delete merged branch +1. Delete merged branch Just a housekeeping move if the PR is from IQSS. Click the delete branch button where the merge button had been. There is no deletion for outside contributions. ## Checklist for Completing QA on a PR - 1. Build the docs -2. Smoke test the pr -3. Test the new functionality -4. Regression test -5. Test any upgrade instructions +1. Smoke test the pr +1. Test the new functionality +1. Regression test +1. Test any upgrade instructions ## Checklist for QA on Release - -1. Review Consolidated Release Notes, in particular upgrade instructions. -2. Conduct performance testing and compare with the previous release. -3. Perform clean install and smoke test. -4. Potentially follow upgrade instructions. Though they have been performed incrementally for each PR, the sequence may need checking - +1. Review Consolidated Release Notes, in particular upgrade instructions. +1. Conduct performance testing and compare with the previous release. +1. Perform clean install and smoke test. +1. Potentially follow upgrade instructions. Though they have been performed incrementally for each PR, the sequence may need checking diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index 51b38ee0921..d3364fbbbf9 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -6,11 +6,11 @@ ## Introduction -This document describes the testing process used by QA at IQSS and provides a guide for others filling in for that role. Please note that many variations are possible, and the main thing is to catch bugs and provide a good quality product to the user community. +This guide describes the testing process used by QA at IQSS and provides a reference for others filling in for that role. Please note that many variations are possible, and the main thing is to catch bugs and provide a good quality product to the user community. ## Workflow -The basic workflow is bugs or feature requests are submitted to GitHub by the community or by team members as issues. These issues are prioritized and added to a two-week sprint that is reflected on the GitHub Kanban board. 
As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common develop branch and ultimately released as part of the product. Before a pull request is merged it must be reviewed by a member of the development team from a coding perspective, it must pass automated integration tests before moving to QA. There it is tested manually, exercising the UI using three common browser types and any business logic it implements. Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions are tested. Once this passes and any bugs that are found are corrected, the automated integration tests are confirmed to be passing, the PR is merged into development, the PR is closed, and the branch is deleted. At this point, the pr moves from the QA column automatically into the Done column and the process repeats with the next pr until it is decided to make a release. +The basic workflow is as follows. Bugs or feature requests are submitted to GitHub by the community or by team members as issues. These issues are prioritized and added to a two-week sprint that is reflected on the GitHub {ref}`kanban-board`. As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common {ref}`develop branch ` and ultimately released as part of the product. Before a pull request is moved to QA, it must be reviewed by a member of the development team from a coding perspective, and it must pass automated tests. There it is tested manually, exercising the UI (using three common browsers) and any business logic it implements. Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Done column and the process repeats with the next PR until it is decided to {doc}`make a release `. ## Release Cadence and Sprints @@ -20,13 +20,10 @@ The decision to make a release can be based on the time since the last release, ## Performance Testing and Deployment -The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named Locust, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. 
Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds (I believe), it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. +The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named {ref}`Locust `, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds, it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product, and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. -Once the performance has been tested and recorded in a Google spreadsheet for this proposed version, the release will be prepared and posted. +Once the performance has been tested and recorded in a [Google spreadsheet](https://docs.google.com/spreadsheets/d/1lwPlifvgu3-X_6xLwq6Zr6sCOervr1mV_InHIWjh5KA/edit?usp=sharing) for this proposed version, the release will be prepared and posted. -Preparing the release consists of writing and reviewing the release notes compiled from individual notes in PRs that have been merged for this release. A PR is made for the notes and merged. Next, increment the version numbers in certain code files, produce a PR with those changes, and merge that into the common development branch. Last, a PR is made to merge and develop into the master branch. Once that is merged a guide build with the new release version is made from the master branch. Last, a release war file is built from the master and an installer is built from the master branch and includes the newly built war file. - -Publishing the release consists of creating a new draft release on GitHub, posting the release notes, uploading the .war file and the installer .zip file, and any ancillary files used to configure this release. The latest link for the guides should be updated on the guides server to point to the newest version. Once that is all in place, specify the version name and the master branch at the top of the GitHub draft release and publish. This will tag the master branch with the version number and make the release notes and files available to the public. - -Once released, post to Dataverse general about the release and when possible, deploy to demo and production. +## Making a Release +See {doc}`/developers/making-releases` in the Developer Guide. 
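Editorial note: the overview text above describes the load profile used for performance testing (50% homepage, 50% dataset pages drawn from roughly ten datasets with varying file counts, guest access, 5-20 seconds of think time, ramping from 1 to 250 users). A minimal Locust sketch of that profile, under the assumption that Locust is installed and with placeholder dataset PIDs standing in for the real test datasets, might look like this:

```python
"""Sketch of the load profile described in the QA overview.

Run, for example, with a ramp-up against the test host:
    locust -f perf_profile.py --host https://dataverse-internal.iq.harvard.edu \
           --users 250 --spawn-rate 5
"""
import random
from locust import HttpUser, task, between

# Placeholder PIDs: substitute ~10 datasets with varying file counts.
DATASET_PIDS = [
    "doi:10.5072/FK2/EXAMPLE1",
    "doi:10.5072/FK2/EXAMPLE2",
]

class GuestUser(HttpUser):
    # Modest random think time between requests, per the overview.
    wait_time = between(5, 20)

    @task(1)
    def homepage(self):
        self.client.get("/")

    @task(1)
    def dataset_page(self):
        pid = random.choice(DATASET_PIDS)
        # Guest view of a dataset landing page; group all PIDs under one stats name.
        self.client.get(f"/dataset.xhtml?persistentId={pid}", name="/dataset.xhtml")
```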
diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index 7075d7f1776..a5981dcfbe9 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -10,7 +10,7 @@ To run performance tests, we have a performance test cluster on AWS that employs ## Access -Access to performance cluster instances requires ssh keys, see Leonid. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. Access to the demo requires an ssh key, see Leonid. +Access to performance cluster instances requires ssh keys. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. Access to the demo requires an ssh key, see Leonid. ## Special Notes ⚠️ @@ -19,6 +19,4 @@ Please note the performance database is also used occasionally by Julian and the Executing the Performance Script -------------------------------- -To execute the performance test script, you need to install a local copy of the database-helper-scripts project (https://github.com/IQSS/dataverse-helper-scripts), written by Raman. I have since produced a stripped-down script that calls just the DB and ds and works with python3. - -The automated integration test runs happen on each commit to a PR on an AWS instance and should be reviewed to be passing before merging into development. Their status can be seen on the PR page near the bottom, above the merge button. See Don Sizemore or Phil for questions. +To execute the performance test script, you need to install a local copy of the database-helper-scripts project at . We have since produced a stripped-down script that calls just the DB and ds and works with python3. diff --git a/doc/sphinx-guides/source/qa/test-automation-integration.md b/doc/sphinx-guides/source/qa/test-automation-integration.md deleted file mode 100644 index 5e9d00cd461..00000000000 --- a/doc/sphinx-guides/source/qa/test-automation-integration.md +++ /dev/null @@ -1,35 +0,0 @@ -# Test automation and integration test - -```{contents} -:depth: 3 -``` - -This test suite is added to and maintained by development. It is generally advisable for code contributors to add integration tests when adding new functionality. The approach here is one of code coverage: exercise as much of the code base’s code paths as possible, every time to catch bugs. - -This type of approach is often used to give contributing developers confidence that their code didn’t introduce any obvious, major issues and is run on each commit. Since it is a broad set of tests, it is not clear whether any specific, conceivable test is run but it does add a lot of confidence that the code base is functioning due to its reach and consistency. - -## Building and Deploying a Pull Request from Jenkins to Dataverse-Internal: - - -1. Log on to GitHub, go to projects, dataverse to see Kanban board, select a pull request to test from the QA queue. - -2. From the pull request page, click the copy icon next to the pull request branch name. - -3. Log on to jenkins.dataverse.org, select the IQSS_Dataverse_Internal project, and configure the repository URL and branch specifier to match the ones from the pull request. 
For example: - - * 8372-gdcc-xoai-library has IQSS implied - - **Repository URL:** https://github.com/IQSS/dataverse.git - - **Branch specifier:** */8372-gdcc-xoai-library - * GlobalDataverseCommunityConsortium:GDCC/DC-3B - - **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git - - **Branch specifier:** */GDCC/DC-3B. - -4. Click Build Now and note the build number in progress. - -5. Once complete, go to https://dataverse-internal.iq.harvard.edu and check that the deployment succeeded, and that the homepage displays the latest build number. - -6. If for some reason it didn’t deploy, check the server.log file. It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (su - dataverse, /usr/local/payara5/bin/asadmin list-applications, /usr/local/payara5/bin/asadmin undeploy dataverse-5.11.1, /usr/local/payara5/bin/asadmin deploy /tmp/dataverse-5.11.1.war) - -7. If that didn’t work, you may have run into a flyway DB script collision error but that should be indicated by the server.log - -8. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to tail -F server.log in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. \ No newline at end of file diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md new file mode 100644 index 00000000000..ba8e5296d47 --- /dev/null +++ b/doc/sphinx-guides/source/qa/test-automation.md @@ -0,0 +1,35 @@ +# Test Automation + +```{contents} +:depth: 3 +``` + +The API test suite is added to and maintained by development. (See {doc}`/developers/testing` in the Developer Guide.) It is generally advisable for code contributors to add API tests when adding new functionality. The approach here is one of code coverage: exercise as much of the code base's code paths as possible, every time to catch bugs. + +This type of approach is often used to give contributing developers confidence that their code didn’t introduce any obvious, major issues and is run on each commit. Since it is a broad set of tests, it is not clear whether any specific, conceivable test is run but it does add a lot of confidence that the code base is functioning due to its reach and consistency. + +## Building and Deploying a Pull Request from Jenkins to Dataverse-Internal + + +1. Log on to GitHub, go to projects, dataverse to see Kanban board, select a pull request to test from the QA queue. + +1. From the pull request page, click the copy icon next to the pull request branch name. + +1. Log on to , select the `IQSS_Dataverse_Internal` project, and configure the repository URL and branch specifier to match the ones from the pull request. For example: + + * 8372-gdcc-xoai-library has IQSS implied + - **Repository URL:** https://github.com/IQSS/dataverse.git + - **Branch specifier:** */8372-gdcc-xoai-library + * GlobalDataverseCommunityConsortium:GDCC/DC-3B + - **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git + - **Branch specifier:** */GDCC/DC-3B. + +1. Click "Build Now" and note the build number in progress. + +1. Once complete, go to and check that the deployment succeeded, and that the homepage displays the latest build number. + +1. If for some reason it didn’t deploy, check the server.log file. 
It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (`su - dataverse` then `/usr/local/payara5/bin/asadmin list-applications; /usr/local/payara5/bin/asadmin undeploy dataverse-5.11.1; /usr/local/payara5/bin/asadmin deploy /tmp/dataverse-5.11.1.war`) + +1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. + +1. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to `tail -F server.log` in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. diff --git a/doc/sphinx-guides/source/qa/testing-infrastructure.md b/doc/sphinx-guides/source/qa/testing-infrastructure.md index fb66bc4d099..45b3b360ac7 100644 --- a/doc/sphinx-guides/source/qa/testing-infrastructure.md +++ b/doc/sphinx-guides/source/qa/testing-infrastructure.md @@ -6,10 +6,14 @@ ## Dataverse Internal -To build and test a PR, we use a build named IQSS_Dataverse_Internal on jenkins.dataverse.org, which deploys the .war file to an AWS instance named dataverse-internal.iq.harvard.edu. -Login to Jenkins requires a username and password. Check with Don Sizemore. Login to the dataverse-internal server requires a key, see Leonid. +To build and test a PR, we use a build named `IQSS_Dataverse_Internal` on , which deploys the .war file to an AWS instance named . ## Guides Server -There is also a guides build project named guides.dataverse.org. Any test builds of guides are deployed to a named directory** on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. -Login to the guides server requires a key, see Don Sizemore. +There is also a guides build project named `guides.dataverse.org`. Any test builds of guides are deployed to a named directory on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. + +Note that changes to guides can also be previewed on Read the Docs. In the pull request, look for a link like . This Read the Docs preview is also mentioned under also {doc}`/developers/documentation`. + +## Other Servers + +We can spin up additional AWS EC2 instances as needed. See {doc}`/developers/deployment` in the Developer Guide for the scripts we use. 
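Editorial note: the test-automation steps above end with checking dataverse-internal to confirm that the deployment succeeded and that the homepage reflects the expected build. A small sketch of that check follows; it assumes the instance exposes the standard `/api/info/version` endpoint and that the build string reported there corresponds to the Jenkins build just deployed.

```python
#!/usr/bin/env python3
"""Sketch: quick post-deployment check for dataverse-internal."""
import json
import urllib.request

BASE = "https://dataverse-internal.iq.harvard.edu"

# 1. The homepage should respond at all (a failed deployment usually errors out).
with urllib.request.urlopen(BASE + "/") as resp:
    print("homepage status:", resp.status)

# 2. Report the version/build string so it can be compared with the Jenkins build number.
with urllib.request.urlopen(BASE + "/api/info/version") as resp:
    data = json.load(resp)["data"]
print("deployed version:", data.get("version"), data.get("build", ""))
```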
From 7650eb308ed5cb8805981e77b252ceb2e3c760c2 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 13 Nov 2023 16:35:25 -0500 Subject: [PATCH 0225/1112] Removes the title from content and add label --- doc/sphinx-guides/source/qa/manual-testing.md | 3 ++- doc/sphinx-guides/source/qa/other-approaches.md | 3 ++- doc/sphinx-guides/source/qa/overview.md | 3 ++- doc/sphinx-guides/source/qa/performance-tests.md | 3 ++- doc/sphinx-guides/source/qa/test-automation.md | 3 ++- doc/sphinx-guides/source/qa/testing-infrastructure.md | 3 ++- 6 files changed, 12 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/qa/manual-testing.md b/doc/sphinx-guides/source/qa/manual-testing.md index 9f365aae59f..580e5153394 100644 --- a/doc/sphinx-guides/source/qa/manual-testing.md +++ b/doc/sphinx-guides/source/qa/manual-testing.md @@ -1,6 +1,7 @@ # Manual Testing Approach -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` ## Introduction diff --git a/doc/sphinx-guides/source/qa/other-approaches.md b/doc/sphinx-guides/source/qa/other-approaches.md index cf679c3f442..2e2ef906191 100644 --- a/doc/sphinx-guides/source/qa/other-approaches.md +++ b/doc/sphinx-guides/source/qa/other-approaches.md @@ -1,6 +1,7 @@ # Other Approaches to Deploying and Testing -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index d3364fbbbf9..c4f66446ca3 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -1,6 +1,7 @@ # Overview -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index a5981dcfbe9..f433226d4ff 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -1,6 +1,7 @@ # Performance Testing -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md index ba8e5296d47..c2b649df498 100644 --- a/doc/sphinx-guides/source/qa/test-automation.md +++ b/doc/sphinx-guides/source/qa/test-automation.md @@ -1,6 +1,7 @@ # Test Automation -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` diff --git a/doc/sphinx-guides/source/qa/testing-infrastructure.md b/doc/sphinx-guides/source/qa/testing-infrastructure.md index 45b3b360ac7..7a4bda626fc 100644 --- a/doc/sphinx-guides/source/qa/testing-infrastructure.md +++ b/doc/sphinx-guides/source/qa/testing-infrastructure.md @@ -1,6 +1,7 @@ # Infrastructure for Testing -```{contents} +```{contents} Contents: +:local: :depth: 3 ``` From 75789e0f94d36fce1270b0714bd5e516f356d8ee Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 13 Nov 2023 19:06:26 -0500 Subject: [PATCH 0226/1112] current state of the flyway script (work in progress/likely to change) #8549 --- .../V6.0.0.3__8549-collection-quotas.sql | 70 +++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 src/main/resources/db/migration/V6.0.0.3__8549-collection-quotas.sql diff --git a/src/main/resources/db/migration/V6.0.0.3__8549-collection-quotas.sql b/src/main/resources/db/migration/V6.0.0.3__8549-collection-quotas.sql new file mode 100644 index 00000000000..f74d9bebe30 --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.3__8549-collection-quotas.sql @@ -0,0 +1,70 @@ +-- Storage size column added: +ALTER TABLE 
dvobject ADD COLUMN IF NOT EXISTS storagesize BIGINT; + +-- (work in progress! the table structure may change/the column may be moved out into +-- its own table. but the mechanics of the recursion are working) + +-- The somewhat convoluted queries below populate the storage sizes for the entire +-- DvObject tree, fast. It IS possible, to do it all with one recursive PostgresQL +-- query, that will crawl the tree from the leaves (DataFiles) up and add up the +-- sizes for all the Datasets/Collections above. Unfortunately, that takes some hours +-- on a database the size of the one at IQSS. So what we are doing instead is compute +-- the total sizes of all the *directly* linked objects, with 3 linear queries. This +-- will correctly calculate the sizes of all the Datasets (since they can only +-- contain DataFiles, directly, without any extra hierarchy possible) and those +-- Collections that only contain Datasets; but not the sizes of Collections that +-- have sub-collections. To take any sub-collections into account we are then running +-- a recursive query - but then we only need to run it on the tree of Collections only, +-- which should make it manageably fast on any real life instance. + +UPDATE dvobject SET storagesize=0; +-- For datafiles, the storage size = main file size by default: +-- (we are excluding any harvested files) +UPDATE dvobject SET storagesize=COALESCE(f.filesize,0) FROM datafile f, dataset d WHERE f.id = dvobject.id AND dvobject.owner_id = d.id AND d.harvestingclient_id IS null; +-- ... but for ingested tabular files the size of the saved original needs to be added, since +-- those also take space: +-- (should be safe to assume that there are no *harvested ingested* files) +UPDATE dvobject SET storagesize=dvobject.storagesize + COALESCE(datatable.originalFileSize,0) FROM datatable WHERE datatable.datafile_id = dvobject.id; +-- Now we can calculate storage sizes of each individual dataset (a simple sum +-- of the storage sizes of all the files in the dataset): +-- (excluding the harvested datasets; this is less important, since there should be +-- significantly fewer datasets than files, but might as well) +UPDATE dvobject SET storagesize=o.combinedStorageSize +FROM (SELECT datasetobject.id, SUM(fileobject.storagesize) AS combinedStorageSize +FROM dvobject fileobject, dvobject datasetobject +WHERE fileobject.owner_id = datasetobject.id +GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null; +-- ... 
and then we can repeat the same for collections, by setting the storage size +-- to the sum of the storage sizes of the datasets *directly* in each collection: +-- (no attemp is made yet to recursively count the sizes all the chilld sub-collections) +UPDATE dvobject SET storagesize=o.combinedStorageSize +FROM (SELECT collectionobject.id, SUM(datasetobject.storagesize) AS combinedStorageSize +FROM dvobject datasetobject, dvobject collectionobject +WHERE datasetobject.owner_id = collectionobject.id +AND datasetobject.storagesize IS NOT null +GROUP BY collectionobject.id) o WHERE o.id = dvobject.id AND dvobject.dtype='Dataverse'; + +-- And now we will update the storage sizes of all the Collection ("Dataverse") objects +-- that contain sub-collections, *recursively*, to add their sizes to the totals: +WITH RECURSIVE treestorage (id, owner_id, storagesize, dtype) AS +( + -- All dataverses: + SELECT id, owner_id, storagesize, dtype + FROM dvobject + WHERE dtype = 'Dataverse' + + UNION + + -- Recursive Member: + SELECT dvobject.id, treestorage.owner_id, dvobject.storagesize, treestorage.dtype + FROM treestorage, dvobject + WHERE treestorage.id = dvobject.owner_id + AND dvobject.dtype = 'Dataverse' +) + +UPDATE dvobject SET storagesize=storagesize+(SELECT COALESCE(SUM(storagesize),0) +FROM treestorage WHERE owner_id=dvobject.id) +--FROM treestorage ts +--WHERE ts.owner_id=dvobject.id +WHERE dvobject.dtype = 'Dataverse' +AND dvobject.id IN (SELECT owner_id FROM treestorage WHERE owner_id IS NOT null); From c49036bf3d67d22cec384a8fe4f7cb23ed3d9a46 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:06:43 +0000 Subject: [PATCH 0227/1112] Added: includeDeaccessioned support to getDatasetVersionCitation API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 9 ++++++-- .../harvard/iq/dataverse/api/DatasetsIT.java | 21 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 3 ++- 3 files changed, 29 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 292aba0cee3..68c618b0f1f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3995,9 +3995,14 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken" @GET @AuthRequired @Path("{id}/versions/{versionId}/citation") - public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response(req -> ok( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned).getCitation(true, false)), getRequestUser(crc)); } @POST diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 56bf53c1c99..d20f1e8a58b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3371,13 +3371,32 @@ public void getDatasetVersionCitation() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, apiToken); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, false, apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() .statusCode(OK.getStatusCode()) // We check that the returned message contains information expected for the citation string .body("data.message", containsString("DRAFT VERSION")); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getDatasetVersionCitationNotDeaccessioned = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); + getDatasetVersionCitationNotDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getDatasetVersionCitationDeaccessioned = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); + getDatasetVersionCitationDeaccessioned.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", containsString("DEACCESSIONED VERSION")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e3a7fd0cfc3..2336bf8beb8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3345,10 +3345,11 @@ static Response getPrivateUrlDatasetVersionCitation(String privateUrlToken) { return response; } - static Response getDatasetVersionCitation(Integer datasetId, String version, String apiToken) { + static Response getDatasetVersionCitation(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json") + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation"); return response; } From 75ff2fbad275a4543525ac0dc62f65d3eaa0e5c1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:10:14 +0000 Subject: [PATCH 0228/1112] Added: API docs for #10104 --- doc/sphinx-guides/source/api/native-api.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1992390410c..2e3a0b2af08 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2502,6 +2502,16 @@ Get Citation curl -H "Accept:application/json" 
"$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER" +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true" + Get Citation by Private URL Token ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From be631af6e5fd5dd181aebdb0ee8a2dd1da3ff789 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:12:31 +0000 Subject: [PATCH 0229/1112] Added: release notes for #10104 --- doc/release-notes/10104-dataset-citation-deaccessioned.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10104-dataset-citation-deaccessioned.md diff --git a/doc/release-notes/10104-dataset-citation-deaccessioned.md b/doc/release-notes/10104-dataset-citation-deaccessioned.md new file mode 100644 index 00000000000..0ba06d729c4 --- /dev/null +++ b/doc/release-notes/10104-dataset-citation-deaccessioned.md @@ -0,0 +1 @@ +The getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. From 2fb81f6b5e1a5c735b937600b0dd74ee47d236a1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 15 Nov 2023 10:01:52 -0500 Subject: [PATCH 0230/1112] altering circuit breakers for qa --- conf/solr/9.3.0/solrconfig.xml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/conf/solr/9.3.0/solrconfig.xml b/conf/solr/9.3.0/solrconfig.xml index b89315cdaa9..9705faa7009 100644 --- a/conf/solr/9.3.0/solrconfig.xml +++ b/conf/solr/9.3.0/solrconfig.xml @@ -588,10 +588,10 @@ check for "Circuit Breakers tripped" in logs and the corresponding error message should tell you what transpired (if the failure was caused by tripped circuit breakers). 
--> - + 5 + - + 5 + - + + - + + From 74d36b64d0fc36afafa5382952050239737ebe1a Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 16 Nov 2023 11:24:30 -0500 Subject: [PATCH 0232/1112] #9686 preliminary check in --- .../java/edu/harvard/iq/dataverse/Dataset.java | 14 +------------- .../java/edu/harvard/iq/dataverse/DvObject.java | 17 +++++++++++++++++ .../V6.0.0.3__9686-move-harvestingclient-id.sql | 8 ++++++++ 3 files changed, 26 insertions(+), 13 deletions(-) create mode 100644 src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 245bdf0efd2..ad72ada20e9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -742,21 +742,9 @@ public void setDatasetExternalCitations(List datasetEx this.datasetExternalCitations = datasetExternalCitations; } - @ManyToOne - @JoinColumn(name="harvestingClient_id") - private HarvestingClient harvestedFrom; - - public HarvestingClient getHarvestedFrom() { - return this.harvestedFrom; - } - public void setHarvestedFrom(HarvestingClient harvestingClientConfig) { - this.harvestedFrom = harvestingClientConfig; - } - public boolean isHarvested() { - return this.harvestedFrom != null; - } + private String harvestIdentifier; diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 9e7f3f3fe96..16237203d78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import java.sql.Timestamp; @@ -351,6 +352,22 @@ public GlobalId getGlobalId() { return globalId; } + @ManyToOne + @JoinColumn(name="harvestingClient_id") + private HarvestingClient harvestedFrom; + + public HarvestingClient getHarvestedFrom() { + return this.harvestedFrom; + } + + public void setHarvestedFrom(HarvestingClient harvestingClientConfig) { + this.harvestedFrom = harvestingClientConfig; + } + + public boolean isHarvested() { + return this.harvestedFrom != null; + } + public abstract T accept(Visitor v); @Override diff --git a/src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql new file mode 100644 index 00000000000..23d66701b99 --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql @@ -0,0 +1,8 @@ +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS harvestingclient_id BIGINT; + +update dvobject dvo set harvestingclient_id = s.harvestingclient_id from +(select id, harvestingclient_id from dataset d) s +where s.id = dvo.id; + +--ALTER TABLE dataset drop COLUMN IF EXISTS harvestingclient_id; + From 5c045120d6660ee0b07501cadfb06aaf9f083f6b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 16 Nov 2023 13:42:51 -0500 Subject: [PATCH 0233/1112] #9686 rename migration script --- ...lient-id.sql => V6.0.0.4__9686-move-harvestingclient-id.sql} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename src/main/resources/db/migration/{V6.0.0.3__9686-move-harvestingclient-id.sql => V6.0.0.4__9686-move-harvestingclient-id.sql} (72%) diff --git 
a/src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.0.0.4__9686-move-harvestingclient-id.sql similarity index 72% rename from src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql rename to src/main/resources/db/migration/V6.0.0.4__9686-move-harvestingclient-id.sql index 23d66701b99..0e4c9a58a93 100644 --- a/src/main/resources/db/migration/V6.0.0.3__9686-move-harvestingclient-id.sql +++ b/src/main/resources/db/migration/V6.0.0.4__9686-move-harvestingclient-id.sql @@ -1,7 +1,7 @@ ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS harvestingclient_id BIGINT; update dvobject dvo set harvestingclient_id = s.harvestingclient_id from -(select id, harvestingclient_id from dataset d) s +(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s where s.id = dvo.id; --ALTER TABLE dataset drop COLUMN IF EXISTS harvestingclient_id; From a376b4e3f4bacc8dc651b7048d9a323535dc92f7 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 10:01:33 -0500 Subject: [PATCH 0234/1112] Add condition for 401 when an invalid key is provided and create changelog on API Guide --- doc/sphinx-guides/source/api/changelog.rst | 13 +++++++++++++ doc/sphinx-guides/source/api/index.rst | 1 + .../java/edu/harvard/iq/dataverse/api/AccessIT.java | 11 ++++++----- 3 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 doc/sphinx-guides/source/api/changelog.rst diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst new file mode 100644 index 00000000000..b78d268db33 --- /dev/null +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -0,0 +1,13 @@ +API Changelog +============= + +.. contents:: |toctitle| + :local: + :depth: 1 + +6.0.0 +----- + +Changes +~~~~~~~ + - **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index c9e79098546..dd195aa9d62 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -24,3 +24,4 @@ API Guide linkeddatanotification apps faq + changelog \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 42e21e53101..d08f916243f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -198,6 +198,8 @@ public void testDownloadSingleFile() { //Not logged in non-restricted Response anonDownloadOriginal = UtilIT.downloadFileOriginal(tabFile1Id); Response anonDownloadConverted = UtilIT.downloadFile(tabFile1Id); + Response anonDownloadConvertedNullKey = UtilIT.downloadFile(tabFile1Id, null); + // ... and download the same tabular data file, but without the variable name header added: Response anonDownloadTabularNoHeader = UtilIT.downloadTabularFileNoVarHeader(tabFile1Id); // ... 
and download the same tabular file, this time requesting the "format=tab" explicitly: @@ -206,6 +208,8 @@ public void testDownloadSingleFile() { assertEquals(OK.getStatusCode(), anonDownloadConverted.getStatusCode()); assertEquals(OK.getStatusCode(), anonDownloadTabularNoHeader.getStatusCode()); assertEquals(OK.getStatusCode(), anonDownloadTabularWithFormatName.getStatusCode()); + assertEquals(UNAUTHORIZED.getStatusCode(), anonDownloadConvertedNullKey.getStatusCode()); + int origSizeAnon = anonDownloadOriginal.getBody().asByteArray().length; int convertSizeAnon = anonDownloadConverted.getBody().asByteArray().length; int tabularSizeNoVarHeader = anonDownloadTabularNoHeader.getBody().asByteArray().length; @@ -423,10 +427,7 @@ private HashMap readZipResponse(InputStream iStrea } String name = entry.getName(); -// String s = String.format("Entry: %s len %d added %TD", -// entry.getName(), entry.getSize(), -// new Date(entry.getTime())); -// System.out.println(s); + // Once we get the entry from the zStream, the zStream is // positioned read to read the raw data, and we keep @@ -466,7 +467,7 @@ private HashMap readZipResponse(InputStream iStrea @Test public void testRequestAccess() throws InterruptedException { - + String pathToJsonFile = "scripts/api/data/dataset-create-new.json"; Response createDatasetResponse = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); createDatasetResponse.prettyPrint(); From 63725d75c115352ff9d0bb94f2e5b6b4d7ca5d05 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 11:07:17 -0500 Subject: [PATCH 0235/1112] remove cruft: mdc logs #9115 --- mdc-logs/raw-mdc-2019-01-07.log | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 mdc-logs/raw-mdc-2019-01-07.log diff --git a/mdc-logs/raw-mdc-2019-01-07.log b/mdc-logs/raw-mdc-2019-01-07.log deleted file mode 100644 index d7a6386160e..00000000000 --- a/mdc-logs/raw-mdc-2019-01-07.log +++ /dev/null @@ -1,6 +0,0 @@ -#Fields: event_time client_ip session_cookie_id user_cookie_id user_id request_url identifier filename size user-agent title publisher publisher_id authors publication_date version other_id target_url publication_year -2019-01-07T15:14:51-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:15:15-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:04-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:14-0500 0:0:0:0:0:0:0:1 
9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV 168298bae7c-2c5bbc1a9c8c 1 Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:19-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV 168298bb8ce-337d8df49763 4026 Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 From 2433114ec7b8430753bc730056a07e24ac0bb5d3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 11:20:03 -0500 Subject: [PATCH 0236/1112] fix bullet #10060 #10070 --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index b78d268db33..a1cffd84f33 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ - - **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. From e0350e735551270f9bd23bfa226b6946282df467 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 11:38:53 -0500 Subject: [PATCH 0237/1112] Change 6.0.0 to 6.0 --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index a1cffd84f33..086ff4a20e5 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -5,7 +5,7 @@ API Changelog :local: :depth: 1 -6.0.0 +6.0 ----- Changes From 437e3b94edf89a2245310709c07d8238c0df4235 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 17 Nov 2023 11:42:17 -0500 Subject: [PATCH 0238/1112] Update doc/sphinx-guides/source/api/changelog.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 086ff4a20e5..2698ba3debf 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **/api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. 
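The behavior documented in the changelog entry above can be spot-checked against a running installation with a curl probe along the following lines. This is an illustrative sketch rather than part of the patch series: SERVER_URL, FILE_ID, and the deliberately bogus token are assumed placeholders for your own server and a published, non-restricted file, and the X-Dataverse-key header is one way of passing an API token.

    # Public (non-restricted) file with an invalid API token: expect HTTP 401 on Dataverse 6.0 and later,
    # mirroring the anonDownloadConvertedNullKey assertion added to AccessIT above
    curl -s -o /dev/null -w "%{http_code}\n" -H "X-Dataverse-key: not-a-real-token" "$SERVER_URL/api/access/datafile/$FILE_ID"

    # Same file with no token at all: the anonymous download is still expected to succeed (HTTP 200),
    # matching the existing anonDownloadConverted assertion
    curl -s -o /dev/null -w "%{http_code}\n" "$SERVER_URL/api/access/datafile/$FILE_ID"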
From 640f69e39f71244b9ba1d7f534180a6b4c8b58cc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 13:19:14 -0500 Subject: [PATCH 0239/1112] add release note for API changelog #10060 --- doc/release-notes/10060-api-changelog.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10060-api-changelog.md diff --git a/doc/release-notes/10060-api-changelog.md b/doc/release-notes/10060-api-changelog.md new file mode 100644 index 00000000000..56ac96e3564 --- /dev/null +++ b/doc/release-notes/10060-api-changelog.md @@ -0,0 +1,3 @@ +We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html + +See also #10060. From 83a66aac65db2f7634b3917d332b0e4253be3c84 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 17 Nov 2023 14:55:58 -0500 Subject: [PATCH 0240/1112] Update doc/sphinx-guides/source/api/changelog.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 2698ba3debf..f518a9b542d 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **/api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed to happy (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. From 70edaa789e84c99b110036c232155337afb5c459 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 15:02:32 -0500 Subject: [PATCH 0241/1112] Remove "to happy " --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index f518a9b542d..d6742252d27 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed to happy (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. From 73593acb1bcdb9ba1d62e47310753e905b2546dd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 17 Nov 2023 15:17:28 -0500 Subject: [PATCH 0242/1112] #9464 query by dvo. 
update IT --- .../dataverse/metrics/MetricsServiceBean.java | 33 ++++++++++--------- .../harvard/iq/dataverse/api/MetricsIT.java | 14 +++++--- 2 files changed, 27 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 79369207963..832dda5ced9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -138,8 +138,8 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat + "from datasetversion\n" + "where versionstate='RELEASED' \n" + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n") - + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " : "") - + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NOT NULL\n " : "") + + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " : "") + + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NOT NULL\n " : "") + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : ")\n") + "group by dataset_id) as subq group by subq.date order by date;" @@ -156,11 +156,11 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat * @param d */ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { - String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NULL)\n"; + String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NULL)\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated - String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NOT NULL)\n"; + String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NOT NULL)\n"; if (DATA_LOCATION_REMOTE.equals(dataLocation)) { dataLocationLine = harvestBaseLine; // replace } else if (DATA_LOCATION_ALL.equals(dataLocation)) { @@ -189,7 +189,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + + "join dvobject on dvobject.id = dataset.id\n" + "where versionstate='RELEASED' \n" + ((d == null) ? 
"" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + "and \n" @@ -212,8 +212,9 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + " from datasetversion\n" + " join dataset on dataset.id = datasetversion.dataset_id\n" + + " join dvobject on dataset.id = dvobject.id\n" + " where versionstate='RELEASED'\n" + - " and dataset.harvestingclient_id is null\n" + + " and dvobject.harvestingclient_id is null\n" + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + " group by dataset_id\n" + "))\n"; @@ -225,7 +226,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio // so the query is simpler: String harvestOriginClause = "(\n" + " datasetversion.dataset_id = dataset.id\n" + - " AND dataset.harvestingclient_id IS NOT null \n" + + " AND dvobject.harvestingclient_id IS NOT null \n" + " AND date_trunc('month', datasetversion.createtime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + ")\n"; @@ -244,7 +245,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio + "JOIN datasetfieldtype ON datasetfieldtype.id = controlledvocabularyvalue.datasetfieldtype_id\n" + "JOIN datasetversion ON datasetversion.id = datasetfield.datasetversion_id\n" + "JOIN dataset ON dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "JOIN dvobject ON dvobject.id = dataset.id\n") + + "JOIN dvobject ON dvobject.id = dataset.id\n" + "WHERE\n" + originClause + "AND datasetfieldtype.name = 'subject'\n" @@ -258,11 +259,11 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio } public long datasetsPastDays(int days, String dataLocation, Dataverse d) { - String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NULL)\n"; + String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NULL)\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated - String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NOT NULL)\n"; + String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NOT NULL)\n"; if (DATA_LOCATION_REMOTE.equals(dataLocation)) { dataLocationLine = harvestBaseLine; // replace } else if (DATA_LOCATION_ALL.equals(dataLocation)) { @@ -276,7 +277,7 @@ public long datasetsPastDays(int days, String dataLocation, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + + "join dvobject on dvobject.id = dataset.id\n" + "where versionstate='RELEASED' \n" + ((d == null) ? 
"" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and \n" @@ -304,7 +305,7 @@ public JsonArray filesTimeSeries(Dataverse d) { + "where datasetversion.id=filemetadata.datasetversion_id\n" + "and versionstate='RELEASED' \n" + "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n" - + "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " + + "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " + ((d == null) ? ")" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + "))\n ") + "group by filemetadata.id) as subq group by subq.date order by date;"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -327,11 +328,11 @@ public long filesToMonth(String yyyymm, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + + "join dvobject on dvobject.id = dataset.id\n" + "where versionstate='RELEASED'\n" + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" - + "and dataset.harvestingclient_id is null\n" + + "and dvobject.harvestingclient_id is null\n" + "group by dataset_id \n" + ");" ); @@ -350,11 +351,11 @@ public long filesPastDays(int days, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + + "join dvobject on dvobject.id = dataset.id\n" + "where versionstate='RELEASED'\n" + "and releasetime > current_date - interval '" + days + "' day\n" + ((d == null) ? 
"" : "AND dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") - + "and dataset.harvestingclient_id is null\n" + + "and dvobject.harvestingclient_id is null\n" + "group by dataset_id \n" + ");" ); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java index e3328eefb4a..b961a86dc0b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java @@ -30,7 +30,7 @@ public static void cleanUpClass() { @Test public void testGetDataversesToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-04"; // yyyymm = null; Response response = UtilIT.metricsDataversesToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -54,7 +54,7 @@ public void testGetDataversesToMonth() { @Test public void testGetDatasetsToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-04"; // yyyymm = null; Response response = UtilIT.metricsDatasetsToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -77,7 +77,7 @@ public void testGetDatasetsToMonth() { @Test public void testGetFilesToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-04"; // yyyymm = null; Response response = UtilIT.metricsFilesToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -100,7 +100,7 @@ public void testGetFilesToMonth() { @Test public void testGetDownloadsToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-04"; // yyyymm = null; Response response = UtilIT.metricsDownloadsToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -283,6 +283,12 @@ public void testGetDatasetsBySubject() { response = UtilIT.metricsDatasetsBySubject("dataLocation=local"); response.then().assertThat() .statusCode(OK.getStatusCode()); + + //Test ok when passing remote + response = UtilIT.metricsDatasetsBySubject("dataLocation=remote"); + response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()); } @Test From d0fc9affdf52dfd60461520adb20a6c7d30e7d6b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 17 Nov 2023 15:31:50 -0500 Subject: [PATCH 0243/1112] refactor to avoid overloaded methods in constructors --- .../AbstractRemoteOverlayAccessIO.java | 335 ++++++++++++++++++ .../dataaccess/GlobusAccessibleStore.java | 4 +- .../dataaccess/GlobusOverlayAccessIO.java | 51 ++- .../dataaccess/RemoteOverlayAccessIO.java | 315 +--------------- .../dataaccess/RemoteOverlayAccessIOTest.java | 1 - 5 files changed, 390 insertions(+), 316 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java new file mode 100644 index 00000000000..8adaf746210 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java @@ -0,0 +1,335 @@ +package edu.harvard.iq.dataverse.dataaccess; + +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.channels.Channel; +import java.nio.file.Path; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.List; +import java.util.function.Predicate; +import java.util.logging.Logger; + +import 
javax.net.ssl.SSLContext; + +import org.apache.http.Header; +import org.apache.http.client.config.CookieSpecs; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustAllStrategy; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.protocol.HTTP; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.util.EntityUtils; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; + +public abstract class AbstractRemoteOverlayAccessIO extends StorageIO { + + protected static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); + protected static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths"; + static final String BASE_STORE = "base-store"; + protected static final String SECRET_KEY = "secret-key"; + static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes"; + protected static final String REMOTE_STORE_NAME = "remote-store-name"; + protected static final String REMOTE_STORE_URL = "remote-store-url"; + protected StorageIO baseStore = null; + protected String path = null; + protected PoolingHttpClientConnectionManager cm = null; + CloseableHttpClient httpclient = null; + protected static HttpClientContext localContext = HttpClientContext.create(); + + protected int timeout = 1200; + protected RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) + .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000) + .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build(); + protected static boolean trustCerts = false; + protected int httpConcurrency = 4; + + public static String getBaseStoreIdFor(String driverId) { + return getConfigParamForDriver(driverId, BASE_STORE); + } + + public AbstractRemoteOverlayAccessIO() { + super(); + } + + public AbstractRemoteOverlayAccessIO(String storageLocation, String driverId) { + super(storageLocation, driverId); + } + + public AbstractRemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) { + super(dvObject, req, driverId); + } + + @Override + public Channel openAuxChannel(String auxItemTag, DataAccessOption... 
options) throws IOException { + return baseStore.openAuxChannel(auxItemTag, options); + } + + @Override + public boolean isAuxObjectCached(String auxItemTag) throws IOException { + return baseStore.isAuxObjectCached(auxItemTag); + } + + @Override + public long getAuxObjectSize(String auxItemTag) throws IOException { + return baseStore.getAuxObjectSize(auxItemTag); + } + + @Override + public Path getAuxObjectAsPath(String auxItemTag) throws IOException { + return baseStore.getAuxObjectAsPath(auxItemTag); + } + + @Override + public void backupAsAux(String auxItemTag) throws IOException { + baseStore.backupAsAux(auxItemTag); + } + + @Override + public void revertBackupAsAux(String auxItemTag) throws IOException { + baseStore.revertBackupAsAux(auxItemTag); + } + + @Override + public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException { + baseStore.savePathAsAux(fileSystemPath, auxItemTag); + } + + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize); + } + + /** + * @param inputStream InputStream we want to save + * @param auxItemTag String representing this Auxiliary type ("extension") + * @throws IOException if anything goes wrong. + */ + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag); + } + + @Override + public List listAuxObjects() throws IOException { + return baseStore.listAuxObjects(); + } + + @Override + public void deleteAuxObject(String auxItemTag) throws IOException { + baseStore.deleteAuxObject(auxItemTag); + } + + @Override + public void deleteAllAuxObjects() throws IOException { + baseStore.deleteAllAuxObjects(); + } + + @Override + public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { + return baseStore.getAuxFileAsInputStream(auxItemTag); + } + + protected int getUrlExpirationMinutes() { + String optionValue = getConfigParam(URL_EXPIRATION_MINUTES); + if (optionValue != null) { + Integer num; + try { + num = Integer.parseInt(optionValue); + } catch (NumberFormatException ex) { + num = null; + } + if (num != null) { + return num; + } + } + return 60; + } + + public CloseableHttpClient getSharedHttpClient() { + if (httpclient == null) { + try { + initHttpPool(); + httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build(); + + } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) { + logger.warning(ex.getMessage()); + } + } + return httpclient; + } + + private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { + if (trustCerts) { + // use the TrustSelfSignedStrategy to allow Self Signed Certificates + SSLContext sslContext; + SSLConnectionSocketFactory connectionFactory; + + sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build(); + // create an SSL Socket Factory to use the SSLContext with the trust self signed + // certificate strategy + // and allow all hosts verifier. 
+ connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); + + Registry registry = RegistryBuilder.create() + .register("https", connectionFactory).build(); + cm = new PoolingHttpClientConnectionManager(registry); + } else { + cm = new PoolingHttpClientConnectionManager(); + } + cm.setDefaultMaxPerRoute(httpConcurrency); + cm.setMaxTotal(httpConcurrency > 20 ? httpConcurrency : 20); + } + + @Override + abstract public long retrieveSizeFromMedia(); + + @Override + public boolean exists() { + logger.fine("Exists called"); + return (retrieveSizeFromMedia() != -1); + } + + @Override + public List cleanUp(Predicate filter, boolean dryRun) throws IOException { + return baseStore.cleanUp(filter, dryRun); + } + + @Override + public String getStorageLocation() throws IOException { + String fullStorageLocation = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStorageLocation); + int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); + if (driverIndex >= 0) { + fullStorageLocation = fullStorageLocation + .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + if (this.getDvObject() instanceof Dataset) { + throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); + } else if (this.getDvObject() instanceof DataFile) { + fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation; + } else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStorageLocation: " + fullStorageLocation); + return fullStorageLocation; + } + protected void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + + if (baseStore == null) { + String baseDriverId = getBaseStoreIdFor(driverId); + String fullStorageLocation = null; + String baseDriverType = getConfigParamForDriver(baseDriverId, StorageIO.TYPE, + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if (dvObject instanceof Dataset) { + baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); + } else { + if (this.getDvObject() != null) { + fullStorageLocation = getStoragePath(); + + // S3 expects :/// + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files") + + "/" + fullStorageLocation; + break; + default: + logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " + + getConfigParamForDriver(baseDriverId, StorageIO.TYPE)); + throw new IOException("Not supported"); + } + + } else if (storageLocation != null) { + // ://// + // remoteDriverId:// is removed if coming through directStorageIO + int index = storageLocation.indexOf(DataAccess.SEPARATOR); + if (index > 0) { + storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); + } + // The base store needs the baseStoreIdentifier and not the relative URL (if it exists) + int endOfId = storageLocation.indexOf("//"); + fullStorageLocation = (endOfId>-1) ? 
storageLocation.substring(0, endOfId) : storageLocation; + + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files") + + "/" + fullStorageLocation; + break; + default: + logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " + + getConfigParamForDriver(baseDriverId, StorageIO.TYPE)); + throw new IOException("Not supported"); + } + } + baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); + } + if (baseDriverType.contentEquals(DataAccess.S3)) { + ((S3AccessIO) baseStore).setMainDriver(false); + } + } + remoteStoreName = getConfigParam(REMOTE_STORE_NAME); + try { + remoteStoreUrl = new URL(getConfigParam(REMOTE_STORE_URL)); + } catch (MalformedURLException mfue) { + logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); + } + } + + protected String getStoragePath() throws IOException { + String fullStoragePath = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStoragePath); + int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); + if (driverIndex >= 0) { + fullStoragePath = fullStoragePath + .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + int suffixIndex = fullStoragePath.indexOf("//"); + if (suffixIndex >= 0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); + } + if (getDvObject() instanceof Dataset) { + fullStoragePath = getDataset().getAuthorityForFileStorage() + "/" + + getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (getDvObject() instanceof DataFile) { + fullStoragePath = getDataFile().getOwner().getAuthorityForFileStorage() + "/" + + getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStoragePath: " + fullStoragePath); + return fullStoragePath; + } + + + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java index afc7556481a..ce75395c883 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java @@ -28,7 +28,7 @@ public static String getTransferPath(String driverId) { } public static JsonArray getReferenceEndpointsWithPaths(String driverId) { - String[] endpoints = StorageIO.getConfigParamForDriver(driverId, RemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*"); + String[] endpoints = StorageIO.getConfigParamForDriver(driverId, AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*"); JsonArrayBuilder builder = Json.createArrayBuilder(); for(int i=0;i extends RemoteOverlayAccessIO implements GlobusAccessibleStore { +public class GlobusOverlayAccessIO extends AbstractRemoteOverlayAccessIO implements GlobusAccessibleStore { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO"); /* @@ -67,11 +68,19 @@ public class 
GlobusOverlayAccessIO extends RemoteOverlayAcce public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); + configureGlobusEndpoints(); + configureStores(req, driverId, null); + logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); + path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + validatePath(path); + + logger.fine("Relative path: " + path); } public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { this.driverId = driverId; + configureGlobusEndpoints(); configureStores(null, driverId, storageLocation); if (isManaged()) { String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation); @@ -83,6 +92,7 @@ public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOE logger.fine("Referenced path: " + path); } } + private boolean isManaged() { if(dataverseManaged==null) { dataverseManaged = GlobusAccessibleStore.isDataverseManaged(this.driverId); @@ -146,7 +156,6 @@ private static String findMatchingEndpoint(String path, String[] allowedEndpoint return null; } - @Override protected void validatePath(String relPath) throws IOException { if (isManaged()) { if (!usesStandardNamePattern(relPath)) { @@ -363,8 +372,7 @@ public String getStorageLocation() throws IOException { * the derived GlobusOverlayAccessIO can support multiple endpoints. * @throws IOException */ - @Override - protected void configureEndpoints() throws IOException { + protected void configureGlobusEndpoints() throws IOException { allowedEndpoints = getAllowedEndpoints(this.driverId); logger.info("Set allowed endpoints: " + Arrays.toString(allowedEndpoints)); } @@ -435,5 +443,40 @@ public static void main(String[] args) { } } + + + @Override + public void open(DataAccessOption... 
option) throws IOException { + // TODO Auto-generated method stub + + } + + + @Override + public Path getFileSystemPath() throws IOException { + // TODO Auto-generated method stub + return null; + } + + + @Override + public void savePath(Path fileSystemPath) throws IOException { + // TODO Auto-generated method stub + + } + + + @Override + public void saveInputStream(InputStream inputStream) throws IOException { + // TODO Auto-generated method stub + + } + + + @Override + public void saveInputStream(InputStream inputStream, Long filesize) throws IOException { + // TODO Auto-generated method stub + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index 5463254140d..1616bfabf96 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -11,45 +11,23 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; -import java.net.URL; import java.nio.channels.Channel; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.nio.file.Path; -import java.security.KeyManagementException; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; import java.util.List; -import java.util.function.Predicate; -import java.util.logging.Logger; import org.apache.http.Header; -import org.apache.http.client.config.CookieSpecs; -import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.protocol.HttpClientContext; -import org.apache.http.config.Registry; -import org.apache.http.config.RegistryBuilder; -import org.apache.http.conn.socket.ConnectionSocketFactory; -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.SSLConnectionSocketFactory; -import org.apache.http.conn.ssl.TrustAllStrategy; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.protocol.HTTP; -import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; -import javax.net.ssl.SSLContext; - /** * @author qqmyers */ @@ -61,40 +39,20 @@ * * baseUrl: http(s):// */ -public class RemoteOverlayAccessIO extends StorageIO { - - private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); +public class RemoteOverlayAccessIO extends AbstractRemoteOverlayAccessIO { // A single baseUrl of the form http(s):// where this store can reference data static final String BASE_URL = "base-url"; - // Multiple endpoints where data can be referenced from. Multiple endpoints are separated by a comma. Multiple endpoints are only supported by the GlobalOverlayAccessIO at present. 
- static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths"; - static final String BASE_STORE = "base-store"; - static final String SECRET_KEY = "secret-key"; - static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes"; - static final String REMOTE_STORE_NAME = "remote-store-name"; - static final String REMOTE_STORE_URL = "remote-store-url"; - - protected StorageIO baseStore = null; - protected String path = null; - private String baseUrl = null; - - protected static HttpClientContext localContext = HttpClientContext.create(); - protected PoolingHttpClientConnectionManager cm = null; - CloseableHttpClient httpclient = null; - protected int timeout = 1200; - protected RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) - .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000) - .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build(); - protected static boolean trustCerts = false; - protected int httpConcurrency = 4; + String baseUrl = null; public RemoteOverlayAccessIO() { + super(); } public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); this.setIsLocalFile(false); + configureRemoteEndpoints(); configureStores(req, driverId, null); logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); @@ -106,6 +64,7 @@ public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) public RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOException { super(null, null, driverId); this.setIsLocalFile(false); + configureRemoteEndpoints(); configureStores(null, driverId, storageLocation); path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); @@ -296,105 +255,12 @@ public void delete() throws IOException { } - @Override - public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException { - return baseStore.openAuxChannel(auxItemTag, options); - } - - @Override - public boolean isAuxObjectCached(String auxItemTag) throws IOException { - return baseStore.isAuxObjectCached(auxItemTag); - } - - @Override - public long getAuxObjectSize(String auxItemTag) throws IOException { - return baseStore.getAuxObjectSize(auxItemTag); - } - - @Override - public Path getAuxObjectAsPath(String auxItemTag) throws IOException { - return baseStore.getAuxObjectAsPath(auxItemTag); - } - - @Override - public void backupAsAux(String auxItemTag) throws IOException { - baseStore.backupAsAux(auxItemTag); - } - - @Override - public void revertBackupAsAux(String auxItemTag) throws IOException { - baseStore.revertBackupAsAux(auxItemTag); - } - - @Override - // this method copies a local filesystem Path into this DataAccess Auxiliary - // location: - public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException { - baseStore.savePathAsAux(fileSystemPath, auxItemTag); - } - - @Override - public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { - baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize); - } - - /** - * @param inputStream InputStream we want to save - * @param auxItemTag String representing this Auxiliary type ("extension") - * @throws IOException if anything goes wrong. 
- */ - @Override - public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException { - baseStore.saveInputStreamAsAux(inputStream, auxItemTag); - } - - @Override - public List listAuxObjects() throws IOException { - return baseStore.listAuxObjects(); - } - - @Override - public void deleteAuxObject(String auxItemTag) throws IOException { - baseStore.deleteAuxObject(auxItemTag); - } - - @Override - public void deleteAllAuxObjects() throws IOException { - baseStore.deleteAllAuxObjects(); - } - - @Override - public String getStorageLocation() throws IOException { - String fullStorageLocation = dvObject.getStorageIdentifier(); - logger.fine("storageidentifier: " + fullStorageLocation); - int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); - if (driverIndex >= 0) { - fullStorageLocation = fullStorageLocation - .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); - } - if (this.getDvObject() instanceof Dataset) { - throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); - } else if (this.getDvObject() instanceof DataFile) { - fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation; - } else if (dvObject instanceof Dataverse) { - throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); - } - logger.fine("fullStorageLocation: " + fullStorageLocation); - return fullStorageLocation; - } - @Override public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { throw new UnsupportedDataAccessOperationException( "RemoteOverlayAccessIO: this is a remote DataAccess IO object, it has no local filesystem path associated with it."); } - @Override - public boolean exists() { - logger.fine("Exists called"); - return (retrieveSizeFromMedia() != -1); - } - @Override public WritableByteChannel getWriteChannel() throws UnsupportedDataAccessOperationException { throw new UnsupportedDataAccessOperationException( @@ -407,11 +273,6 @@ public OutputStream getOutputStream() throws UnsupportedDataAccessOperationExcep "RemoteOverlayAccessIO: there are no output Streams associated with S3 objects."); } - @Override - public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { - return baseStore.getAuxFileAsInputStream(auxItemTag); - } - @Override public boolean downloadRedirectEnabled() { String optionValue = getConfigParam(StorageIO.DOWNLOAD_REDIRECT); @@ -443,103 +304,12 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary } } - int getUrlExpirationMinutes() { - String optionValue = getConfigParam(URL_EXPIRATION_MINUTES); - if (optionValue != null) { - Integer num; - try { - num = Integer.parseInt(optionValue); - } catch (NumberFormatException ex) { - num = null; - } - if (num != null) { - return num; - } - } - return 60; - } - - protected void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { - configureEndpoints(); - - - if (baseStore == null) { - String baseDriverId = getBaseStoreIdFor(driverId); - String fullStorageLocation = null; - String baseDriverType = getConfigParamForDriver(baseDriverId, StorageIO.TYPE, - DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - - if (dvObject instanceof Dataset) { - baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); - } else { - if (this.getDvObject() != null) { - fullStorageLocation = getStoragePath(); - - // S3 expects :/// - switch (baseDriverType) { - case 
DataAccess.S3: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/" - + fullStorageLocation; - break; - case DataAccess.FILE: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files") - + "/" + fullStorageLocation; - break; - default: - logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " - + getConfigParamForDriver(baseDriverId, StorageIO.TYPE)); - throw new IOException("Not supported"); - } - - } else if (storageLocation != null) { - // ://// - // remoteDriverId:// is removed if coming through directStorageIO - int index = storageLocation.indexOf(DataAccess.SEPARATOR); - if (index > 0) { - storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); - } - // The base store needs the baseStoreIdentifier and not the relative URL (if it exists) - int endOfId = storageLocation.indexOf("//"); - fullStorageLocation = (endOfId>-1) ? storageLocation.substring(0, endOfId) : storageLocation; - - switch (baseDriverType) { - case DataAccess.S3: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/" - + fullStorageLocation; - break; - case DataAccess.FILE: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files") - + "/" + fullStorageLocation; - break; - default: - logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " - + getConfigParamForDriver(baseDriverId, StorageIO.TYPE)); - throw new IOException("Not supported"); - } - } - baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); - } - if (baseDriverType.contentEquals(DataAccess.S3)) { - ((S3AccessIO) baseStore).setMainDriver(false); - } - } - remoteStoreName = getConfigParam(REMOTE_STORE_NAME); - try { - remoteStoreUrl = new URL(getConfigParam(REMOTE_STORE_URL)); - } catch (MalformedURLException mfue) { - logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); - } - } /** This endpoint configures all the endpoints the store is allowed to reference data from. At present, the RemoteOverlayAccessIO only supports a single endpoint but * the derived GlobusOverlayAccessIO can support multiple endpoints. * @throws IOException */ - protected void configureEndpoints() throws IOException { + protected void configureRemoteEndpoints() throws IOException { baseUrl = getConfigParam(BASE_URL); if (baseUrl == null) { //Will accept the first endpoint using the newer setting @@ -560,70 +330,6 @@ protected void configureEndpoints() throws IOException { } } - // Convenience method to assemble the path, starting with the DOI - // authority/identifier/, that is needed to create a base store via - // DataAccess.getDirectStorageIO - the caller has to add the store type specific - // prefix required. 
- protected String getStoragePath() throws IOException { - String fullStoragePath = dvObject.getStorageIdentifier(); - logger.fine("storageidentifier: " + fullStoragePath); - int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); - if (driverIndex >= 0) { - fullStoragePath = fullStoragePath - .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); - } - int suffixIndex = fullStoragePath.indexOf("//"); - if (suffixIndex >= 0) { - fullStoragePath = fullStoragePath.substring(0, suffixIndex); - } - if (this.getDvObject() instanceof Dataset) { - fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" - + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; - } else if (this.getDvObject() instanceof DataFile) { - fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" - + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; - } else if (dvObject instanceof Dataverse) { - throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); - } - logger.fine("fullStoragePath: " + fullStoragePath); - return fullStoragePath; - } - - public CloseableHttpClient getSharedHttpClient() { - if (httpclient == null) { - try { - initHttpPool(); - httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build(); - - } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) { - logger.warning(ex.getMessage()); - } - } - return httpclient; - } - - private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { - if (trustCerts) { - // use the TrustSelfSignedStrategy to allow Self Signed Certificates - SSLContext sslContext; - SSLConnectionSocketFactory connectionFactory; - - sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build(); - // create an SSL Socket Factory to use the SSLContext with the trust self signed - // certificate strategy - // and allow all hosts verifier. - connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); - - Registry registry = RegistryBuilder.create() - .register("https", connectionFactory).build(); - cm = new PoolingHttpClientConnectionManager(registry); - } else { - cm = new PoolingHttpClientConnectionManager(); - } - cm.setDefaultMaxPerRoute(httpConcurrency); - cm.setMaxTotal(httpConcurrency > 20 ? 
httpConcurrency : 20); - } - @Override public void savePath(Path fileSystemPath) throws IOException { throw new UnsupportedDataAccessOperationException( @@ -660,13 +366,4 @@ static boolean isValidIdentifier(String driverId, String storageId) { } return true; } - - public static String getBaseStoreIdFor(String driverId) { - return getConfigParamForDriver(driverId, BASE_STORE); - } - - @Override - public List cleanUp(Predicate filter, boolean dryRun) throws IOException { - return baseStore.cleanUp(filter, dryRun); - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java index 5affc01aff0..1c371881ba6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java @@ -8,7 +8,6 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.GlobalIdServiceBean; import edu.harvard.iq.dataverse.mocks.MocksFactory; import edu.harvard.iq.dataverse.util.UrlSignerUtil; From 2500bccc5fa438bf2dff4e5aa887e816099a51e3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 15 Nov 2023 14:04:22 -0500 Subject: [PATCH 0244/1112] assert current /bag-info.txt behavior #8760 Also, add a superuser-only API for downloading files (such as bags) from the file system so we can make assertions about them in our tests. --- .../iq/dataverse/api/AbstractApiBean.java | 7 ++ .../edu/harvard/iq/dataverse/api/Admin.java | 25 ++++- .../edu/harvard/iq/dataverse/api/BagIT.java | 101 +++++++++++++++++- 3 files changed, 128 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 027f9e0fcb1..58565bcc9d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -45,11 +45,13 @@ import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParser; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; +import java.io.InputStream; import java.net.URI; import java.util.Arrays; import java.util.Collections; @@ -726,6 +728,11 @@ protected Response ok(String data, MediaType mediaType, String downloadFilename) return res.build(); } + protected Response ok(InputStream inputStream) { + ResponseBuilder res = Response.ok().entity(inputStream).type(MediaType.valueOf(FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT)); + return res.build(); + } + protected Response created( String uri, JsonObjectBuilder bld ) { return Response.created( URI.create(uri) ) .entity( Json.createObjectBuilder() diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fd3b9a89e54..684ed32dff8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -107,6 +107,7 @@ import 
edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.UrlSignerUtil; +import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; @@ -2425,5 +2426,27 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); } - + + /** + * For testing only. Download a file from the file system. + */ + @GET + @AuthRequired + @Path("/localfile") + public Response getLocalFile(@Context ContainerRequestContext crc, @QueryParam("pathToFile") String pathToFile) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + try { + return ok(new FileInputStream(pathToFile)); + } catch (IOException ex) { + return error(Status.BAD_REQUEST, ex.toString()); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java index e7210bc45a9..fae9cf95156 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java @@ -1,17 +1,32 @@ package edu.harvard.iq.dataverse.api; -import io.restassured.RestAssured; -import io.restassured.response.Response; import edu.harvard.iq.dataverse.engine.command.impl.LocalSubmitToArchiveCommand; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import io.restassured.RestAssured; +import static io.restassured.RestAssured.given; +import io.restassured.response.Response; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Enumeration; +import java.util.Scanner; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; import org.junit.jupiter.api.AfterAll; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; public class BagIT { + static String bagitExportDir = "/tmp"; + @BeforeAll public static void setUpClass() { @@ -25,14 +40,14 @@ public static void setUpClass() { setArchiverSettings.then().assertThat() .statusCode(OK.getStatusCode()); - Response setBagItLocalPath = UtilIT.setSetting(":BagItLocalPath", "/tmp"); + Response setBagItLocalPath = UtilIT.setSetting(":BagItLocalPath", bagitExportDir); setBagItLocalPath.then().assertThat() .statusCode(OK.getStatusCode()); } @Test - public void testBagItExport() { + public void testBagItExport() throws IOException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); @@ -63,6 +78,78 @@ public void testBagItExport() { archiveDataset.prettyPrint(); archiveDataset.then().assertThat().statusCode(OK.getStatusCode()); + // spaceName comes from LocalSubmitToArchiveCommand + String spaceName = datasetPid.replace(':', '-').replace('/', '-') + .replace('.', '-').toLowerCase(); + // spacename: doi-10-5072-fk2-fosg5q + + String pathToZip = bagitExportDir + "/" + spaceName + "v1.0" + ".zip"; + + try { + // give the bag time to generate + Thread.sleep(3000); + } catch (InterruptedException ex) { + } + + // A bag could look like this: + 
//doi-10-5072-FK2-DKUTDUv-1-0/data/ + //doi-10-5072-FK2-DKUTDUv-1-0/data/Darwin's Finches/ + //doi-10-5072-FK2-DKUTDUv-1-0/metadata/ + //doi-10-5072-FK2-DKUTDUv-1-0/metadata/pid-mapping.txt + //doi-10-5072-FK2-DKUTDUv-1-0/manifest-md5.txt + //doi-10-5072-FK2-DKUTDUv-1-0/bagit.txt + //doi-10-5072-FK2-DKUTDUv-1-0/metadata/oai-ore.jsonld + //doi-10-5072-FK2-DKUTDUv-1-0/metadata/datacite.xml + //doi-10-5072-FK2-DKUTDUv-1-0/bag-info.txt + // --- + // bag-info.txt could look like this: + //Contact-Name: Finch, Fiona + //Contact-Email: finch@mailinator.com + //Source-Organization: Dataverse Installation () + //Organization-Address: + //Organization-Email: + //External-Description: Darwin's finches (also known as the Galápagos finches) are a group of about + // fifteen species of passerine birds. + //Bagging-Date: 2023-11-14 + //External-Identifier: https://doi.org/10.5072/FK2/LZIGBC + //Bag-Size: 0 bytes + //Payload-Oxum: 0.0 + //Internal-Sender-Identifier: Root:Darwin's Finches + Response downloadBag = downloadLocalFile(pathToZip, apiToken); + downloadBag.then().assertThat().statusCode(OK.getStatusCode()); + Path outputPath = Paths.get("/tmp/foo.zip"); + java.nio.file.Files.copy(downloadBag.getBody().asInputStream(), outputPath, StandardCopyOption.REPLACE_EXISTING); + + ZipFile zipFile = new ZipFile(outputPath.toString()); + Enumeration entries = zipFile.entries(); + String sourceOrg = null; + String orgAddress = null; + String orgEmail = null; + while (entries.hasMoreElements()) { + ZipEntry entry = entries.nextElement(); + String name = entry.getName(); + System.out.println("name: " + name); + if (name.endsWith("bag-info.txt")) { + InputStream stream = zipFile.getInputStream(entry); + Scanner s = new Scanner(stream).useDelimiter("\\A"); + String result = s.hasNext() ? 
s.next() : ""; + System.out.println("result: " + result); + String[] lines = result.split("\n"); + for (String line : lines) { + if (line.startsWith("Source-Organization")) { + sourceOrg = line; + } else if (line.startsWith("Organization-Address")) { + orgAddress = line; + } else if (line.startsWith("Organization-Email")) { + orgEmail = line; + } else { + } + } + } + } + assertEquals("Source-Organization: Dataverse Installation ()", sourceOrg.trim()); + assertEquals("Organization-Address: ", orgAddress.trim()); + assertEquals("Organization-Email: ", orgEmail.trim()); } @AfterAll @@ -75,4 +162,10 @@ public static void tearDownClass() { } + static Response downloadLocalFile(String pathToFile, String apiToken) { + return given() + .header("X-Dataverse-key", apiToken) + .get("/api/admin/localfile?pathToFile=" + pathToFile); + } + } From 7240e870d35fda4ec96a4ee0e0b488a9c4fc3d4f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 15 Nov 2023 16:03:15 -0500 Subject: [PATCH 0245/1112] configurable BagIt source org name, address, email #8760 These values were used while testing: DATAVERSE_BAGIT_SOURCEORG_NAME=LibraScholar DATAVERSE_BAGIT_SOURCEORG_ADDRESS=123 Wisdom Way\nCambridge, MA\nUSA DATAVERSE_BAGIT_SOURCEORG_EMAIL=hello@dataverse.librascholar.edu --- .../iq/dataverse/settings/JvmSettings.java | 7 +++++++ .../iq/dataverse/util/bagit/BagGenerator.java | 15 ++++++++++----- src/main/java/propertyFiles/Bundle.properties | 4 ---- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index cc3272413c7..2f59350906c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -150,6 +150,13 @@ public enum JvmSettings { SCOPE_NETCDF(PREFIX, "netcdf"), GEO_EXTRACT_S3_DIRECT_UPLOAD(SCOPE_NETCDF, "geo-extract-s3-direct-upload"), + // BAGIT SETTINGS + SCOPE_BAGIT(PREFIX, "bagit"), + SCOPE_BAGIT_SOURCEORG(SCOPE_BAGIT, "sourceorg"), + BAGIT_SOURCE_ORG_NAME(SCOPE_BAGIT_SOURCEORG, "name"), + BAGIT_SOURCEORG_ADDRESS(SCOPE_BAGIT_SOURCEORG, "address"), + BAGIT_SOURCEORG_EMAIL(SCOPE_BAGIT_SOURCEORG, "email"), + ; private static final String SCOPE_SEPARATOR = "."; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index baba1a0cb43..b7c44014b80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -74,7 +74,9 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFile.ChecksumType; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; +import java.util.Optional; public class BagGenerator { @@ -822,17 +824,20 @@ private String generateInfoFile() { logger.warning("No contact info available for BagIt Info file"); } - info.append("Source-Organization: " + BundleUtil.getStringFromBundle("bagit.sourceOrganization")); + String orgName = JvmSettings.BAGIT_SOURCE_ORG_NAME.lookupOptional(String.class).orElse("Dataverse Installation ()"); + String orgAddress = JvmSettings.BAGIT_SOURCEORG_ADDRESS.lookupOptional(String.class).orElse(""); + String orgEmail = JvmSettings.BAGIT_SOURCEORG_EMAIL.lookupOptional(String.class).orElse(""); + + 
info.append("Source-Organization: " + orgName); // ToDo - make configurable info.append(CRLF); - info.append("Organization-Address: " + WordUtils.wrap( - BundleUtil.getStringFromBundle("bagit.sourceOrganizationAddress"), 78, CRLF + " ", true)); + info.append("Organization-Address: " + WordUtils.wrap(orgAddress, 78, CRLF + " ", true)); + info.append(CRLF); // Not a BagIt standard name - info.append( - "Organization-Email: " + BundleUtil.getStringFromBundle("bagit.sourceOrganizationEmail")); + info.append("Organization-Email: " + orgEmail); info.append(CRLF); info.append("External-Description: "); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 79887f7e76c..972e5e35601 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2379,10 +2379,6 @@ api.prov.error.freeformMissingJsonKey=The JSON object you send must have a key c api.prov.error.freeformNoText=No provenance free form text available for this file. api.prov.error.noDataFileFound=Could not find a file based on ID. -bagit.sourceOrganization=Dataverse Installation () -bagit.sourceOrganizationAddress= -bagit.sourceOrganizationEmail= - bagit.checksum.validation.error=Invalid checksum for file "{0}". Manifest checksum={2}, calculated checksum={3}, type={1} bagit.checksum.validation.exception=Error while calculating checksum for file "{0}". Checksum type={1}, error={2} bagit.validation.bag.file.not.found=Invalid BagIt package: "{0}" From b2c62510e71e6436c2905796b9cc6a24a04b35d0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 14:06:06 -0500 Subject: [PATCH 0246/1112] add docs and release note for bag-info.txt config #8760 --- doc/release-notes/8760-bagit.md | 15 ++++++ .../source/installation/config.rst | 46 +++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 doc/release-notes/8760-bagit.md diff --git a/doc/release-notes/8760-bagit.md b/doc/release-notes/8760-bagit.md new file mode 100644 index 00000000000..30601857309 --- /dev/null +++ b/doc/release-notes/8760-bagit.md @@ -0,0 +1,15 @@ +For BagIT export, it is now possible to configure the following information in bag-info.txt... + +Source-Organization: Harvard Dataverse +Organization-Address: 1737 Cambridge Street, Cambridge, MA, USA +Organization-Email: support@dataverse.harvard.edu + +... using new JVM/MPCONFIG options: + +- dataverse.bagit.sourceorg.name +- dataverse.bagit.sourceorg.address +- dataverse.bagit.sourceorg.email + +Previously, customization was possible by editing `Bundle.properties` but this is no longer supported. + +For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 13a7367de44..df311fcdaca 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1608,6 +1608,25 @@ The workflow id returned in this call (or available by doing a GET of /api/admin Once these steps are taken, new publication requests will automatically trigger submission of an archival copy to the specified archiver, Chronopolis' DuraCloud component in this example. For Chronopolis, as when using the API, it is currently the admin's responsibility to snap-shot the DuraCloud space and monitor the result. Failure of the workflow, (e.g. 
if DuraCloud is unavailable, the configuration is wrong, or the space for this dataset already exists due to a prior publication action or use of the API), will create a failure message but will not affect publication itself. +.. _bag-info.txt: + +Configuring bag-info.txt +++++++++++++++++++++++++ + +Out of the box, placeholder values like below will be placed in bag-info.txt: + +.. code-block:: text + + Source-Organization: Dataverse Installation () + Organization-Address: + Organization-Email: + +To customize these values for your institution, use the following JVM options: + +- :ref:`dataverse.bagit.sourceorg.name` +- :ref:`dataverse.bagit.sourceorg.address` +- :ref:`dataverse.bagit.sourceorg.email` + Going Live: Launching Your Production Deployment ------------------------------------------------ @@ -2506,6 +2525,33 @@ See also :ref:`guestbook-at-request-api` in the API Guide, and . Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_GUESTBOOK_AT_REQUEST``. +.. _dataverse.bagit.sourceorg.name: + +dataverse.bagit.sourceorg.name +++++++++++++++++++++++++++++++ + +The name for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_NAME``. + +.. _dataverse.bagit.sourceorg.address: + +dataverse.bagit.sourceorg.address ++++++++++++++++++++++++++++++++++ + +The mailing address for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`. The example in https://datatracker.ietf.org/doc/html/rfc8493 uses commas as separators: ``1 Main St., Cupertino, California, 11111``. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_ADDRESS``. + +.. _dataverse.bagit.sourceorg.email: + +dataverse.bagit.sourceorg.email ++++++++++++++++++++++++++++++++ + +The email for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_EMAIL``. + .. _feature-flags: Feature Flags From fa6f850b28e8dea1dd2dff542814e29fd7865153 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 16:07:30 -0500 Subject: [PATCH 0247/1112] limit to downloading from /tmp, add docs #8760 --- doc/release-notes/8760-download-tmp-file.md | 3 +++ doc/sphinx-guides/source/api/changelog.rst | 7 +++++ doc/sphinx-guides/source/api/native-api.rst | 10 +++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 13 +++++++--- .../edu/harvard/iq/dataverse/api/AdminIT.java | 26 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/BagIT.java | 10 ++----- .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 +++++ 7 files changed, 64 insertions(+), 12 deletions(-) create mode 100644 doc/release-notes/8760-download-tmp-file.md diff --git a/doc/release-notes/8760-download-tmp-file.md b/doc/release-notes/8760-download-tmp-file.md new file mode 100644 index 00000000000..7623a91ac9a --- /dev/null +++ b/doc/release-notes/8760-download-tmp-file.md @@ -0,0 +1,3 @@ +A new API has been added for testing purposes that allows files to be downloaded from /tmp. 
+ +See diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index d6742252d27..7d6545999ca 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -5,6 +5,13 @@ API Changelog :local: :depth: 1 +6.1 +--- + +New +~~~ +- **/api/admin/downloadTmpFile**: See :ref:`download-file-from-tmp`. + 6.0 ----- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1992390410c..5b1e7410a4f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -5349,6 +5349,16 @@ A curl example using an ``ID`` Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. +.. _download-file-from-tmp: + +Download File from /tmp +~~~~~~~~~~~~~~~~~~~~~~~ + +As a superuser:: + + GET /api/admin/downloadTmpFile?fullyQualifiedPathToFile=/tmp/foo.txt + +Note that this API is probably only useful for testing. MyData ------ diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 684ed32dff8..4da1962853a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -123,6 +123,7 @@ import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; +import java.nio.file.Paths; /** * Where the secure, setup API calls live. @@ -2428,12 +2429,12 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur } /** - * For testing only. Download a file from the file system. + * For testing only. Download a file from /tmp. */ @GET @AuthRequired - @Path("/localfile") - public Response getLocalFile(@Context ContainerRequestContext crc, @QueryParam("pathToFile") String pathToFile) { + @Path("/downloadTmpFile") + public Response downloadTmpFile(@Context ContainerRequestContext crc, @QueryParam("fullyQualifiedPathToFile") String fullyQualifiedPathToFile) { try { AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); if (!user.isSuperuser()) { @@ -2442,8 +2443,12 @@ public Response getLocalFile(@Context ContainerRequestContext crc, @QueryParam(" } catch (WrappedResponse wr) { return wr.getResponse(); } + java.nio.file.Path normalizedPath = Paths.get(fullyQualifiedPathToFile).normalize(); + if (!normalizedPath.toString().startsWith("/tmp")) { + return error(Status.BAD_REQUEST, "Path must begin with '/tmp' but after normalization was '" + normalizedPath +"'."); + } try { - return ok(new FileInputStream(pathToFile)); + return ok(new FileInputStream(fullyQualifiedPathToFile)); } catch (IOException ex) { return error(Status.BAD_REQUEST, ex.toString()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 0c5de662e8a..91ba67b10ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -854,6 +854,32 @@ public void testBannerMessages(){ } + /** + * For a successful download from /tmp, see BagIT. Here we are doing error + * checking. 
+ */ + @Test + public void testDownloadTmpFile() throws IOException { + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response tryToDownloadAsNonSuperuser = UtilIT.downloadTmpFile("/tmp/foo", apiToken); + tryToDownloadAsNonSuperuser.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + + Response toggleSuperuser = UtilIT.makeSuperUser(username); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response tryToDownloadEtcPasswd = UtilIT.downloadTmpFile("/etc/passwd", apiToken); + tryToDownloadEtcPasswd.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo("ERROR")) + .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'.")); + } + private String createTestNonSuperuserApiToken() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java index fae9cf95156..28f7fa28328 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java @@ -115,7 +115,7 @@ public void testBagItExport() throws IOException { //Bag-Size: 0 bytes //Payload-Oxum: 0.0 //Internal-Sender-Identifier: Root:Darwin's Finches - Response downloadBag = downloadLocalFile(pathToZip, apiToken); + Response downloadBag = UtilIT.downloadTmpFile(pathToZip, apiToken); downloadBag.then().assertThat().statusCode(OK.getStatusCode()); Path outputPath = Paths.get("/tmp/foo.zip"); java.nio.file.Files.copy(downloadBag.getBody().asInputStream(), outputPath, StandardCopyOption.REPLACE_EXISTING); @@ -162,10 +162,4 @@ public static void tearDownClass() { } - static Response downloadLocalFile(String pathToFile, String apiToken) { - return given() - .header("X-Dataverse-key", apiToken) - .get("/api/admin/localfile?pathToFile=" + pathToFile); - } - -} +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e3a7fd0cfc3..6abfb10c4f6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3574,4 +3574,11 @@ static Response getDownloadSize(Integer datasetId, return requestSpecification .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } + + static Response downloadTmpFile(String fullyQualifiedPathToFile, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/downloadTmpFile?fullyQualifiedPathToFile=" + fullyQualifiedPathToFile); + } + } From 06f6222ba785fa37890efa4156ec3e7988fe4ff5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 20:29:47 -0500 Subject: [PATCH 0248/1112] more intermediate changes to the entity classes #8549 --- .../edu/harvard/iq/dataverse/DvObject.java | 28 +++++++++++++++++++ .../iq/dataverse/DvObjectContainer.java | 8 ++++-- .../dataverse/ingest/IngestServiceBean.java | 7 +++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 9e7f3f3fe96..b86fabd0a07 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -2,6 +2,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.storageuse.StorageQuota; +import edu.harvard.iq.dataverse.storageuse.StorageUse; import java.sql.Timestamp; import java.text.SimpleDateFormat; @@ -156,6 +158,9 @@ public String visit(DataFile df) { private boolean identifierRegistered; + @Column(nullable = true) + private Long storageSize; + private transient GlobalId globalId = null; @OneToMany(mappedBy = "dvObject", cascade = CascadeType.ALL, orphanRemoval = true) @@ -177,6 +182,13 @@ public void setAlternativePersistentIndentifiers(Set saveAndAddFilesToDataset(DatasetVersion version, + List newFiles, + DataFile fileToReplace, + boolean tabIngest) { + return saveAndAddFilesToDataset(version, newFiles, fileToReplace, tabIngest, null); + } public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace, From 8766932b6c086b1775e3faf8e19f411d83f87c07 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:09:12 -0500 Subject: [PATCH 0249/1112] extra logging --- .../iq/dataverse/search/SearchIncludeFragment.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 400f10cc375..c579eb14b7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -343,9 +343,10 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused setSolrErrorEncountered(false); try { - logger.fine("ATTENTION! query from user: " + query); - logger.fine("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); - logger.fine("ATTENTION! sort by: " + sortField); + logger.info("ATTENTION! query from user: " + query); + logger.info("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); + logger.info("ATTENTION! filterQueriesFinal: " + filterQueriesFinal); + logger.info("ATTENTION! 
sort by: " + sortField); /** * @todo Number of search results per page should be configurable - @@ -408,6 +409,8 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ") + ")"); + logger.info("second pass query: " + queryToPassToSolr); + logger.info("second pass filter query: "+filterQueriesFinalSecondPass.toString()); solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false); From 552e7350cd7f9d9eb577b056e8d3eb414e8dc3cc Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:09:40 -0500 Subject: [PATCH 0250/1112] get quota command #8549 --- .../impl/GetCollectionQuotaCommand.java | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java new file mode 100644 index 00000000000..f07fde9508e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import java.io.IOException; +import java.util.List; +import java.util.logging.Logger; + +/** + * + * @author landreev + * The command doesn't do much. It's sole purpose is to check the permissions + * when it's called by the /api/dataverses/.../storage/quota api. 
+ */ +@RequiredPermissions(Permission.ManageDataversePermissions) +public class GetCollectionQuotaCommand extends AbstractCommand { + + private static final Logger logger = Logger.getLogger(GetCollectionQuotaCommand.class.getCanonicalName()); + + private final Dataverse dataverse; + + public GetCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target) { + super(aRequest, target); + dataverse = target; + } + + @Override + public Long execute(CommandContext ctxt) throws CommandException { + + if (dataverse != null && dataverse.getStorageQuota() != null) { + return dataverse.getStorageQuota().getAllocation(); + } + + return null; + } +} + + From e4aea93f0ada3212d1116b13cd0b2ae8105100e1 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:20:29 -0500 Subject: [PATCH 0251/1112] extra logging --- .../edu/harvard/iq/dataverse/search/SearchIncludeFragment.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index c579eb14b7e..e5b5763efe6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -345,7 +345,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused try { logger.info("ATTENTION! query from user: " + query); logger.info("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); - logger.info("ATTENTION! filterQueriesFinal: " + filterQueriesFinal); + logger.info("ATTENTION! filterQueriesFinal: " + filterQueriesFinal.toString()); logger.info("ATTENTION! sort by: " + sortField); /** From 2b8777990d008b31e61c4338f5b5e964e1f4a20d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:21:17 -0500 Subject: [PATCH 0252/1112] new classes and instances #8549 --- .../iq/dataverse/storageuse/StorageQuota.java | 118 ++++++++++++++++++ .../iq/dataverse/storageuse/StorageUse.java | 94 ++++++++++++++ .../storageuse/StorageUseServiceBean.java | 65 ++++++++++ 3 files changed, 277 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java new file mode 100644 index 00000000000..68ff6d95d00 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java @@ -0,0 +1,118 @@ +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template + */ +package edu.harvard.iq.dataverse.storageuse; + +import edu.harvard.iq.dataverse.DvObject; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import java.io.Serializable; +import java.util.logging.Logger; + +//import jakarta.persistence.*; + +/** + * + * @author landreev + * + */ +@Entity +public class StorageQuota implements Serializable { + private static final Logger logger = 
Logger.getLogger(StorageQuota.class.getCanonicalName()); + + /** + * Only Collection quotas are supported, for now + */ + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + /** + * For defining quotas for Users and/or Groups + * (Not supported as of yet) + + @Column(nullable = true) + private String assigneeIdentifier; + */ + + /** + * Could be changed to ManyToOne - if we wanted to be able to define separate + * quotas on the same collection for different users. (?) + * Whether we actually want to support the above is TBD. (possibly not) + * Only collection-wide quotas are supported for now. + */ + @OneToOne + @JoinColumn(name="definitionPoint_id", nullable=true) + private DvObject definitionPoint; + + @Column(nullable = true) + private Long allocation; + + public StorageQuota() {} + + /*public String getAssigneeIdentifier() { + return assigneeIdentifier; + } + + public void setAssigneeIdentifier(String assigneeIdentifier) { + this.assigneeIdentifier = assigneeIdentifier; + }*/ + + public DvObject getDefinitionPoint() { + return definitionPoint; + } + + public void setDefinitionPoint(DvObject definitionPoint) { + this.definitionPoint = definitionPoint; + } + + public Long getAllocation() { + return allocation; + } + + public void setAllocation(Long allocation) { + this.allocation = allocation; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof StorageQuota)) { + return false; + } + StorageQuota other = (StorageQuota) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "edu.harvard.iq.dataverse.storageuse.StorageQuota[ id=" + id + " ]"; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java new file mode 100644 index 00000000000..2633e3e026b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java @@ -0,0 +1,94 @@ +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template + */ +package edu.harvard.iq.dataverse.storageuse; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.DvObjectContainer; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GenerationType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import java.io.Serializable; + +/** + * + * @author landreev + */ +@NamedQueries({ + @NamedQuery(name = "StorageUse.findByteSizeByDvContainerId",query = "SELECT su.sizeInBytes FROM StorageUse su WHERE su.dvObjectContainer.id =:dvObjectId "), + @NamedQuery(name = "StorageUse.findByDvContainerId",query = "SELECT su FROM StorageUse su WHERE su.dvObjectContainer.id =:dvObjectId ") +}) +@Entity +public class StorageUse 
implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.AUTO) + private Long id; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @OneToOne + @JoinColumn(nullable=false) + private DvObject dvObjectContainer; + + @Column + private Long sizeInBytes = null; + + public StorageUse(DvObjectContainer dvObjectContainer, Long sizeInBytes) { + this.dvObjectContainer = dvObjectContainer; + this.sizeInBytes = sizeInBytes; + } + + public Long getSizeInBytes() { + return sizeInBytes; + } + + public void setSizeInBytes(Long sizeInBytes) { + this.sizeInBytes = sizeInBytes; + } + + public void incrementSizeInBytes(Long sizeInBytes) { + this.sizeInBytes += sizeInBytes; + } + + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof StorageUse)) { + return false; + } + StorageUse other = (StorageUse) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "edu.harvard.iq.dataverse.storageuse.StorageUse[ id=" + id + " ]"; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java new file mode 100644 index 00000000000..fd04344c234 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java @@ -0,0 +1,65 @@ +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template + */ +package edu.harvard.iq.dataverse.storageuse; + +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.DvObjectContainer; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import java.util.logging.Logger; + +/** + * + * @author landreev + */ +@Stateless +@Named +public class StorageUseServiceBean implements java.io.Serializable { + private static final Logger logger = Logger.getLogger(StorageUseServiceBean.class.getCanonicalName()); + @EJB + DataverseServiceBean dataverseService; + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public StorageUse findByDvContainerId(Long dvObjectId) { + return em.createNamedQuery("StorageUse.findByDvContainerId", StorageUse.class).setParameter("dvObjectId", dvObjectId).getSingleResult(); + } + + public Long findStorageSizeByDvContainerId(Long dvObjectId) { + return em.createNamedQuery("StorageUse.findByteSizeByDvContainerId", Long.class).setParameter("dvObjectId", dvObjectId).getSingleResult(); + } + + public void incrementStorageSizeHierarchy(DvObjectContainer dvObject, Long filesize) { + incrementStorageSize(dvObject, filesize); + DvObjectContainer parent = dvObject.getOwner(); + while (parent != null) { + incrementStorageSize(parent, filesize); + parent = parent.getOwner(); + } + } + + /** + * Should this be done in a new transaction? 
+ * @param dvObject + * @param filesize + */ + public void incrementStorageSize(DvObjectContainer dvObject, Long filesize) { + StorageUse dvContainerSU = findByDvContainerId(dvObject.getId()); + if (dvContainerSU != null) { + // @todo: named query + dvContainerSU.incrementSizeInBytes(filesize); + em.merge(dvContainerSU); + } else { + dvContainerSU = new StorageUse(dvObject, filesize); + em.persist(dvContainerSU); + } + } + +} From 235b1b018a50fd099c983516b046c6847be41e48 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:44:47 -0500 Subject: [PATCH 0253/1112] A fix for the missing subtree filter query in the 2nd pass search query. #9635 --- .../search/SearchIncludeFragment.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index e5b5763efe6..1acd4b0f8a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -282,7 +282,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused SolrQueryResponse solrQueryResponse = null; SolrQueryResponse solrQueryResponseSecondPass = null; - List filterQueriesFinal = new ArrayList<>(); + List filterQueriesExtended = new ArrayList<>(); if (dataverseAlias != null) { this.dataverse = dataverseService.findByAlias(dataverseAlias); @@ -296,7 +296,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused * @todo centralize this into SearchServiceBean */ if (!isfilterQueryAlreadyInMap(filterDownToSubtree)){ - filterQueriesFinal.add(filterDownToSubtree); + filterQueriesExtended.add(filterDownToSubtree); } // this.dataverseSubtreeContext = dataversePath; } else { @@ -309,7 +309,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused this.setRootDv(true); } - filterQueriesFinal.addAll(filterQueries); + filterQueriesExtended.addAll(filterQueries); /** * Add type queries, for the types (Dataverses, Datasets, Datafiles) @@ -323,7 +323,9 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused selectedTypesHumanReadable = combine(arr, " OR "); if (!selectedTypesHumanReadable.isEmpty()) { typeFilterQuery = SearchFields.TYPE + ":(" + selectedTypesHumanReadable + ")"; - } + } + List filterQueriesFinal = new ArrayList<>(); + filterQueriesFinal.addAll(filterQueriesExtended); filterQueriesFinal.add(typeFilterQuery); if (page <= 1) { @@ -343,10 +345,10 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused setSolrErrorEncountered(false); try { - logger.info("ATTENTION! query from user: " + query); - logger.info("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); - logger.info("ATTENTION! filterQueriesFinal: " + filterQueriesFinal.toString()); - logger.info("ATTENTION! sort by: " + sortField); + logger.fine"ATTENTION! query from user: " + query); + logger.fine("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); + logger.fine("ATTENTION! filterQueriesFinal: " + filterQueriesFinal.toString()); + logger.fine("ATTENTION! 
sort by: " + sortField); /** * @todo Number of search results per page should be configurable - @@ -399,7 +401,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused // run a second search to obtain the numbers of the unselected types: List filterQueriesFinalSecondPass = new ArrayList<>(); - filterQueriesFinalSecondPass.addAll(filterQueries); + filterQueriesFinalSecondPass.addAll(filterQueriesExtended); arr = new String[3 - selectedTypesList.size()]; int c = 0; @@ -409,8 +411,8 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ") + ")"); - logger.info("second pass query: " + queryToPassToSolr); - logger.info("second pass filter query: "+filterQueriesFinalSecondPass.toString()); + logger.fine("second pass query: " + queryToPassToSolr); + logger.fine("second pass filter query: "+filterQueriesFinalSecondPass.toString()); solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false); From ceeeaecb9d222c2d2073713cdd839dac2ab4a304 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 19 Nov 2023 21:47:30 -0500 Subject: [PATCH 0254/1112] typo. #9635 --- .../edu/harvard/iq/dataverse/search/SearchIncludeFragment.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 1acd4b0f8a1..dd9cd78982a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -345,7 +345,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused setSolrErrorEncountered(false); try { - logger.fine"ATTENTION! query from user: " + query); + logger.fine("ATTENTION! query from user: " + query); logger.fine("ATTENTION! queryToPassToSolr: " + queryToPassToSolr); logger.fine("ATTENTION! filterQueriesFinal: " + filterQueriesFinal.toString()); logger.fine("ATTENTION! sort by: " + sortField); From 5ecfd49c7397f04003c745fc78074e1fb1a9b0aa Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 20 Nov 2023 09:30:16 -0500 Subject: [PATCH 0255/1112] #9686 update metrics queries --- .../dataverse/metrics/MetricsServiceBean.java | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 79369207963..6b540595e77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -138,8 +138,8 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat + "from datasetversion\n" + "where versionstate='RELEASED' \n" + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n") - + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " : "") - + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NOT NULL\n " : "") + + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? 
"and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " : "") + + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NOT NULL\n " : "") + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : ")\n") + "group by dataset_id) as subq group by subq.date order by date;" @@ -156,11 +156,13 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat * @param d */ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { - String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NULL)\n"; + + System.out.print("datasets to month..."); + String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NULL)\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated - String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NOT NULL)\n"; + String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NOT NULL)\n"; if (DATA_LOCATION_REMOTE.equals(dataLocation)) { dataLocationLine = harvestBaseLine; // replace } else if (DATA_LOCATION_ALL.equals(dataLocation)) { @@ -189,7 +191,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + + "join dvobject on dvobject.id = dataset.id\n" + "where versionstate='RELEASED' \n" + ((d == null) ? 
"" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + "and \n" @@ -198,7 +200,6 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { +") sub_temp" ); logger.log(Level.FINE, "Metric query: {0}", query); - return (long) query.getSingleResult(); } @@ -212,6 +213,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + " from datasetversion\n" + " join dataset on dataset.id = datasetversion.dataset_id\n" + + " join dvobject on dataset.id = dvobject.id \n" + " where versionstate='RELEASED'\n" + " and dataset.harvestingclient_id is null\n" + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + @@ -225,6 +227,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio // so the query is simpler: String harvestOriginClause = "(\n" + " datasetversion.dataset_id = dataset.id\n" + + " dvobject.id = dataset.id \n" + " AND dataset.harvestingclient_id IS NOT null \n" + " AND date_trunc('month', datasetversion.createtime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + ")\n"; @@ -253,7 +256,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio + "ORDER BY count(dataset.id) desc;" ); logger.log(Level.FINE, "Metric query: {0}", query); - + System.out.print("by sub to month: " + query); return query.getResultList(); } @@ -616,7 +619,7 @@ public String returnUnexpiredCacheDayBased(String metricName, String days, Strin public String returnUnexpiredCacheMonthly(String metricName, String yyyymm, String dataLocation, Dataverse d) { Metric queriedMetric = getMetric(metricName, dataLocation, yyyymm, d); - + System.out.print("returnUnexpiredCacheMonthly: " + queriedMetric); if (!doWeQueryAgainMonthly(queriedMetric)) { return queriedMetric.getValueJson(); } From f69c22982aeae57fdfb57607e06dfad628123b45 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 20 Nov 2023 09:33:06 -0500 Subject: [PATCH 0256/1112] #9686 update metrics IT --- src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java index e3328eefb4a..fa05a23b675 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java @@ -30,7 +30,7 @@ public static void cleanUpClass() { @Test public void testGetDataversesToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-11"; // yyyymm = null; Response response = UtilIT.metricsDataversesToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -54,7 +54,7 @@ public void testGetDataversesToMonth() { @Test public void testGetDatasetsToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-11"; // yyyymm = null; Response response = UtilIT.metricsDatasetsToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -77,7 +77,7 @@ public void testGetDatasetsToMonth() { @Test public void testGetFilesToMonth() { - String yyyymm = "2018-04"; + String yyyymm = "2023-11"; // yyyymm = null; Response response = UtilIT.metricsFilesToMonth(yyyymm, null); String precache = response.prettyPrint(); @@ -100,7 +100,7 @@ public void testGetFilesToMonth() { @Test public void testGetDownloadsToMonth() { - String yyyymm = "2018-04"; + String yyyymm = 
"2023-11"; // yyyymm = null; Response response = UtilIT.metricsDownloadsToMonth(yyyymm, null); String precache = response.prettyPrint(); From e4ede35ea8a57afc8830dc63619bed3b660da8ff Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 20 Nov 2023 09:37:27 -0500 Subject: [PATCH 0257/1112] #9464 fix logger reference --- .../engine/command/impl/ValidateDatasetJsonCommand.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java index ae1a89c3661..619740ddd89 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java @@ -21,7 +21,7 @@ @RequiredPermissions(Permission.AddDataset) public class ValidateDatasetJsonCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(GetDatasetSchemaCommand.class.getCanonicalName()); + private static final Logger logger = Logger.getLogger(ValidateDatasetJsonCommand.class.getCanonicalName()); private final Dataverse dataverse; private final String datasetJson; From d30ecfda14bd4adcafced8486d58507aba12c55f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 25 Oct 2023 10:56:14 -0400 Subject: [PATCH 0258/1112] add S3 tests, LocalStack, MinIO #6783 Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. See S3AccessIT which executes API (end to end) tests. In addition, a new integration test test class (not an API test, the new kind launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. Note that the format of docker-compose-dev.yml had to change to allow for JVM options to be added. Finally, docs were improved for listing and setting stores via API. --- conf/localstack/buckets.sh | 3 + doc/release-notes/6783-s3-tests.md | 3 + .../source/admin/dataverses-datasets.rst | 4 + docker-compose-dev.yml | 78 +++++- pom.xml | 5 + .../harvard/iq/dataverse/api/S3AccessIT.java | 228 +++++++++++++++--- .../dataaccess/S3AccessIOLocalstackIT.java | 153 ++++++++++++ 7 files changed, 436 insertions(+), 38 deletions(-) create mode 100755 conf/localstack/buckets.sh create mode 100644 doc/release-notes/6783-s3-tests.md create mode 100644 src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOLocalstackIT.java diff --git a/conf/localstack/buckets.sh b/conf/localstack/buckets.sh new file mode 100755 index 00000000000..fe940d9890d --- /dev/null +++ b/conf/localstack/buckets.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +# https://stackoverflow.com/questions/53619901/auto-create-s3-buckets-on-localstack +awslocal s3 mb s3://mybucket diff --git a/doc/release-notes/6783-s3-tests.md b/doc/release-notes/6783-s3-tests.md new file mode 100644 index 00000000000..1febb87aaed --- /dev/null +++ b/doc/release-notes/6783-s3-tests.md @@ -0,0 +1,3 @@ +Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. See S3AccessIT which executes API (end to end) tests. + +In addition, a new integration test test class (not an API test, the new kind launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. 
It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index 170807d3d67..37494c57fa1 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -53,11 +53,15 @@ Configure a Dataverse Collection to Store All New Files in a Specific File Store To direct new files (uploaded when datasets are created or edited) for all datasets in a given Dataverse collection, the store can be specified via the API as shown below, or by editing the 'General Information' for a Dataverse collection on the Dataverse collection page. Only accessible to superusers. :: curl -H "X-Dataverse-key: $API_TOKEN" -X PUT -d $storageDriverLabel http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver + +(Note that for ``dataverse.files.store1.label=MyLabel``, you should pass ``MyLabel``.) The current driver can be seen using:: curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver +(Note that for ``dataverse.files.store1.label=MyLabel``, ``store1`` will be returned.) + and can be reset to the default store with:: curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index bb0a4c95b12..769c24fb3a5 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -9,16 +9,38 @@ services: restart: on-failure user: payara environment: - - DATAVERSE_DB_HOST=postgres - - DATAVERSE_DB_PASSWORD=secret - - DATAVERSE_DB_USER=${DATAVERSE_DB_USER} - - ENABLE_JDWP=1 - - DATAVERSE_FEATURE_API_BEARER_AUTH=1 - - DATAVERSE_AUTH_OIDC_ENABLED=1 - - DATAVERSE_AUTH_OIDC_CLIENT_ID=test - - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test - - DATAVERSE_JSF_REFRESH_PERIOD=1 + DATAVERSE_DB_HOST: postgres + DATAVERSE_DB_PASSWORD: secret + DATAVERSE_DB_USER: ${DATAVERSE_DB_USER} + ENABLE_JDWP: "1" + DATAVERSE_FEATURE_API_BEARER_AUTH: "1" + DATAVERSE_AUTH_OIDC_ENABLED: "1" + DATAVERSE_AUTH_OIDC_CLIENT_ID: test + DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 + DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test + DATAVERSE_JSF_REFRESH_PERIOD: "1" + JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 + -Ddataverse.files.file1.type=file + -Ddataverse.files.file1.label=Filesystem + -Ddataverse.files.file1.directory=${STORAGE_DIR}/store + -Ddataverse.files.localstack1.type=s3 + -Ddataverse.files.localstack1.label=LocalStack + -Ddataverse.files.localstack1.custom-endpoint-url=http://localstack:4566 + -Ddataverse.files.localstack1.custom-endpoint-region=us-east-2 + -Ddataverse.files.localstack1.bucket-name=mybucket + -Ddataverse.files.localstack1.path-style-access=true + -Ddataverse.files.localstack1.upload-redirect=false + -Ddataverse.files.localstack1.access-key=default + -Ddataverse.files.localstack1.secret-key=default + -Ddataverse.files.minio1.type=s3 + -Ddataverse.files.minio1.label=MinIO + -Ddataverse.files.minio1.custom-endpoint-url=http://minio:9000 + -Ddataverse.files.minio1.custom-endpoint-region=us-east-1 + -Ddataverse.files.minio1.bucket-name=mybucket + -Ddataverse.files.minio1.path-style-access=true + -Ddataverse.files.minio1.upload-redirect=false + 
-Ddataverse.files.minio1.access-key=minioadmin + -Ddataverse.files.minio1.secret-key=minioadmin ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -156,6 +178,42 @@ services: networks: - dataverse + dev_localstack: + container_name: "dev_localstack" + hostname: "localstack" + image: localstack/localstack:2.3.2 + restart: on-failure + ports: + - "127.0.0.1:4566:4566" + environment: + - DEBUG=${DEBUG-} + - DOCKER_HOST=unix:///var/run/docker.sock + - HOSTNAME_EXTERNAL=localstack + networks: + - dataverse + volumes: + - ./conf/localstack:/etc/localstack/init/ready.d + tmpfs: + - /localstack:mode=770,size=128M,uid=1000,gid=1000 + + dev_minio: + container_name: "dev_minio" + hostname: "minio" + image: minio/minio + restart: on-failure + ports: + - "9000:9000" + - "9001:9001" + networks: + - dataverse + volumes: + - minio_storage:/data + environment: + # these are the defaults but are here for clarity + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + command: server /data + networks: dataverse: driver: bridge diff --git a/pom.xml b/pom.xml index 4d10073334f..34b0ad2e835 100644 --- a/pom.xml +++ b/pom.xml @@ -612,6 +612,11 @@ 3.0.0 test + + org.testcontainers + localstack + test + From 4ad06ba1af38cf84f5b639a605eecaf95a4fe8b1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Nov 2023 15:54:20 -0500 Subject: [PATCH 0341/1112] rename previewshavefailed to previewimagefail #9506 This matches previewimageavailable, also in dvobject. Plus it's clear we aren't talking about shaving. :) --- .../edu/harvard/iq/dataverse/DataFileServiceBean.java | 2 +- .../iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 10 +++++----- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 +- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 4 ++-- .../migration/V6.0.0.5__9506-track-thumb-failures.sql | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index fae95f12a0c..446c66e5a8b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -987,7 +987,7 @@ public boolean isThumbnailAvailable (DataFile file) { this.save(file); return true; } - file.setPreviewsHaveFailed(true); + file.setPreviewImageFail(true); this.save(file); return false; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index d209f7d9e26..1ee517c9831 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -825,7 +825,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - + "AND o.previewshavefailed = false " + + "AND o.previewimagefail = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype LIKE 'image/%' " @@ -859,7 +859,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - + "AND o.previewshavefailed = false " + + "AND o.previewimagefail = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype = 
'application/pdf' " diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 12f0b63b3a1..c6d4a73bfd9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -191,14 +191,14 @@ public void setPreviewImageAvailable(boolean status) { * real failure in generating the thumbnail. In both cases, we won't want to try * again every time the preview/thumbnail is requested for a view. */ - private boolean previewsHaveFailed; + private boolean previewImageFail; - public boolean isPreviewsHaveFailed() { - return previewsHaveFailed; + public boolean isPreviewImageFail() { + return previewImageFail; } - public void setPreviewsHaveFailed(boolean previewsHaveFailed) { - this.previewsHaveFailed = previewsHaveFailed; + public void setPreviewImageFail(boolean previewImageFail) { + this.previewImageFail = previewImageFail; } public Timestamp getModificationTime() { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 2c2f49a0444..b1d31f8d44b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2429,7 +2429,7 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur @DELETE @Path("/clearThumbnailFailureFlag") public Response clearThumbnailFailureFlag() { - em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE").executeUpdate(); + em.createNativeQuery("UPDATE dvobject SET previewimagefail = FALSE").executeUpdate(); return ok("Thumnail Failure Flags cleared."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index febf659b71a..2de37174a3b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -119,9 +119,9 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } private static boolean generateThumbnail(DataFile file, StorageIO storageIO, int size) { - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); + logger.log(Level.FINE, (file.isPreviewImageFail() ? 
"Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: - if (!file.isPreviewsHaveFailed()) { + if (!file.isPreviewImageFail()) { boolean thumbnailGenerated = false; if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { thumbnailGenerated = generateImageThumbnail(storageIO, size); diff --git a/src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql index 9b12d27db91..156960d2011 100644 --- a/src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql +++ b/src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql @@ -1 +1 @@ -ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewshavefailed BOOLEAN DEFAULT FALSE; \ No newline at end of file +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewimagefail BOOLEAN DEFAULT FALSE; From 7148158dec36576c33c1cbc96143128769dd938a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Nov 2023 15:56:43 -0500 Subject: [PATCH 0342/1112] add tests #9506 --- .../java/edu/harvard/iq/dataverse/api/AdminIT.java | 10 ++++++++++ .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 14 +++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 0c5de662e8a..c29c8619d8c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -818,6 +818,16 @@ public void testLoadMetadataBlock_ErrorHandling() { message ); } + @Test + public void testClearThumbnailFailureFlag(){ + Response nonExistentFile = UtilIT.clearThumbnailFailureFlag(Long.MAX_VALUE); + nonExistentFile.prettyPrint(); + nonExistentFile.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + Response clearAllFlags = UtilIT.clearThumbnailFailureFlags(); + clearAllFlags.prettyPrint(); + clearAllFlags.then().assertThat().statusCode(OK.getStatusCode()); + } @Test public void testBannerMessages(){ diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9b264086c27..58edbae18e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -223,7 +223,19 @@ public static Response validateDataFileHashValue(String fileId, String apiToken .post("/api/admin/validateDataFileHashValue/" + fileId + "?key=" + apiToken); return response; } - + + public static Response clearThumbnailFailureFlags() { + Response response = given() + .delete("/api/admin/clearThumbnailFailureFlag"); + return response; + } + + public static Response clearThumbnailFailureFlag(long fileId) { + Response response = given() + .delete("/api/admin/clearThumbnailFailureFlag/" + fileId); + return response; + } + private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) { JsonObjectBuilder builder = Json.createObjectBuilder(); builder.add("authenticationProviderId", authenticationProviderId); From 67502ca2326b0536077ad96eb0fe497ca70f37f6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Nov 2023 15:58:18 -0500 Subject: [PATCH 0343/1112] fix typos #9506 --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b1d31f8d44b..1445db81e4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2430,7 +2430,7 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur @Path("/clearThumbnailFailureFlag") public Response clearThumbnailFailureFlag() { em.createNativeQuery("UPDATE dvobject SET previewimagefail = FALSE").executeUpdate(); - return ok("Thumnail Failure Flags cleared."); + return ok("Thumbnail Failure Flags cleared."); } @DELETE @@ -2441,7 +2441,7 @@ public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String file Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE where id = ?"); deleteQuery.setParameter(1, df.getId()); deleteQuery.executeUpdate(); - return ok("Thumnail Failure Flag cleared for file id=: " + df.getId() + "."); + return ok("Thumbnail Failure Flag cleared for file id=: " + df.getId() + "."); } catch (WrappedResponse r) { logger.info("Could not find file with the id: " + fileId); return error(Status.BAD_REQUEST, "Could not find file with the id: " + fileId); From 82f0bc0eef833388b3e20bf48fe8bb46163640ee Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Nov 2023 15:59:05 -0500 Subject: [PATCH 0344/1112] one more rename to previewimagefail #9506 This should have been part of 4ad06ba1a. --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 1445db81e4c..4cb0521d218 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2438,7 +2438,7 @@ public Response clearThumbnailFailureFlag() { public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String fileId) { try { DataFile df = findDataFileOrDie(fileId); - Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE where id = ?"); + Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewimagefail = FALSE where id = ?"); deleteQuery.setParameter(1, df.getId()); deleteQuery.executeUpdate(); return ok("Thumbnail Failure Flag cleared for file id=: " + df.getId() + "."); From de2f9a4f6beaad2e34249616dd39748c29e15701 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 30 Nov 2023 16:37:35 -0500 Subject: [PATCH 0345/1112] popup separate tab for single file download transfer --- .../iq/dataverse/FileDownloadServiceBean.java | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 7a03f1a35dc..ca3f5b4bded 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -20,6 +20,8 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.URLTokenUtil; + import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; @@ -310,13 +312,19 @@ private void redirectToCustomZipDownloadService(String customZipServiceUrl, Stri } } - private void 
redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten, Long fileMetadataId) { - String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten, fileMetadataId); - logger.fine("Redirecting to file download url: " + fileDownloadUrl); - try { - FacesContext.getCurrentInstance().getExternalContext().redirect(fileDownloadUrl); - } catch (IOException ex) { - logger.info("Failed to issue a redirect to file download url (" + fileDownloadUrl + "): " + ex); + private void redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten, + Long fileMetadataId) { + String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten, + fileMetadataId); + if (downloadType.equals("GlobusTransfer")) { + PrimeFaces.current().executeScript(URLTokenUtil.getScriptForUrl(fileDownloadUrl)); + } else { + logger.fine("Redirecting to file download url: " + fileDownloadUrl); + try { + FacesContext.getCurrentInstance().getExternalContext().redirect(fileDownloadUrl); + } catch (IOException ex) { + logger.info("Failed to issue a redirect to file download url (" + fileDownloadUrl + "): " + ex); + } } } From c82064ace53bcbf5e8b04a24f916fa333f863c9c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 30 Nov 2023 16:38:17 -0500 Subject: [PATCH 0346/1112] fix old label in popup required case --- src/main/webapp/file-download-button-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 8ef2af40431..318aab1454e 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -80,7 +80,7 @@ - GT: #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} + #{bundle['file.globus.of']} #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} From 2644faee02f7001e51d19e474e3ca5b1b1264302 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 30 Nov 2023 18:03:29 -0500 Subject: [PATCH 0347/1112] Rearranges the code that updates the Storage Use records to reflect the size of the saved content. #8549 --- .../dataverse/ingest/IngestServiceBean.java | 120 +++++++++++------- 1 file changed, 76 insertions(+), 44 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 9b3ddd228e9..5efb4c06f48 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -177,7 +177,7 @@ public class IngestServiceBean { // It must be called before we attempt to permanently save the files in // the database by calling the Save command on the dataset and/or version. - // There is way too much going on in this method. :( + // !! There is way too much going on in this method. :( !! // @todo: Is this method a good candidate for turning into a dedicated Command? public List saveAndAddFilesToDataset(DatasetVersion version, @@ -195,6 +195,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, // renamed FOOBAR-1.txt back to FOOBAR.txt... 
IngestUtil.checkForDuplicateFileNamesFinal(version, newFiles, fileToReplace); Dataset dataset = version.getDataset(); + long totalBytesSaved = 0L; if (systemConfig.isStorageQuotasEnforced()) { // Check if this dataset is subject to any storage quotas: @@ -205,6 +206,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, boolean unattached = false; boolean savedSuccess = false; if (dataFile.getOwner() == null) { + // is it ever "unattached"? + // do we ever call this method with dataFile.getOwner() != null? + // - we really shouldn't be, either. unattached = true; dataFile.setOwner(dataset); } @@ -230,31 +234,38 @@ public List saveAndAddFilesToDataset(DatasetVersion version, dataAccess = DataAccess.createNewStorageIO(dataFile, storageLocation); logger.fine("Successfully created a new storageIO object."); - /* - * This commented-out code demonstrates how to copy bytes from a local - * InputStream (or a readChannel) into the writable byte channel of a Dataverse - * DataAccessIO object: + /** + * This commented-out code demonstrates how to copy + * bytes from a local InputStream (or a readChannel) + * into the writable byte channel of a Dataverse + * DataAccessIO object: */ - /* - * storageIO.open(DataAccessOption.WRITE_ACCESS); - * - * writeChannel = storageIO.getWriteChannel(); readChannel = new - * FileInputStream(tempLocationPath.toFile()).getChannel(); - * - * long bytesPerIteration = 16 * 1024; // 16K bytes long start = 0; while ( - * start < readChannel.size() ) { readChannel.transferTo(start, - * bytesPerIteration, writeChannel); start += bytesPerIteration; } + /** + * storageIO.open(DataAccessOption.WRITE_ACCESS); + * + * writeChannel = storageIO.getWriteChannel(); + * readChannel = new + * FileInputStream(tempLocationPath.toFile()).getChannel(); + * + * long bytesPerIteration = 16 * 1024; // 16K bytes long + * start = 0; + * while ( start < readChannel.size() ) { + * readChannel.transferTo(start, bytesPerIteration, writeChannel); start += bytesPerIteration; + * } */ - /* - * But it's easier to use this convenience method from the DataAccessIO: - * - * (if the underlying storage method for this file is local filesystem, the - * DataAccessIO will simply copy the file using Files.copy, like this: - * - * Files.copy(tempLocationPath, storageIO.getFileSystemLocation(), - * StandardCopyOption.REPLACE_EXISTING); + /** + * But it's easier to use this convenience method from + * the DataAccessIO: + * + * (if the underlying storage method for this file is + * local filesystem, the DataAccessIO will simply copy + * the file using Files.copy, like this: + * + * Files.copy(tempLocationPath, + * storageIO.getFileSystemLocation(), + * StandardCopyOption.REPLACE_EXISTING); */ dataAccess.savePath(tempLocationPath); @@ -265,7 +276,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, savedSuccess = true; logger.fine("Success: permanently saved file " + dataFile.getFileMetadata().getLabel()); - // TODO: reformat this file to remove the many tabs added in cc08330 + // TODO: reformat this file to remove the many tabs added in cc08330 - done, I think? extractMetadataNcml(dataFile, tempLocationPath); } catch (IOException ioex) { @@ -375,6 +386,15 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (savedSuccess) { if (uploadSessionQuota != null) { + // It may be worth considering refreshing the quota here, + // and incrementing the Storage Use record for + // all the parent objects in real time, as + // *each* individual file is being saved. 
I experimented + // with that, but decided against it for performance + // reasons. But yes, there may be some edge case where + // parallel multi-file uploads can end up being able + // to save 2X worth the quota that was available at the + // beginning of each session. if (confirmedFileSize > uploadSessionQuota.getRemainingQuotaInBytes()) { savedSuccess = false; logger.warning("file size over quota limit, skipping"); @@ -382,7 +402,6 @@ public List saveAndAddFilesToDataset(DatasetVersion version, // this (potentially partial) failure to the user. //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(confirmedFileSize), bytesToHumanReadable(storageQuotaLimit))); } else { - // Adjust quota: logger.info("Setting total usage in bytes to " + (uploadSessionQuota.getTotalUsageInBytes() + confirmedFileSize)); uploadSessionQuota.setTotalUsageInBytes(uploadSessionQuota.getTotalUsageInBytes() + confirmedFileSize); @@ -390,19 +409,12 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } // ... unless we had to reject the file just now because of - // the quota limits, increment the storage use record(s): + // the quota limits, count the number of bytes saved for the + // purposes of incrementing the total storage of the parent + // DvObjectContainers: if (savedSuccess) { - // Update storage use for all the parent dvobjects: - // @todo: Do we want to do this after after *each* file is saved? - there may be - // quite a few files being saved here all at once. We could alternatively - // perform this update only once, after this loop is completed (are there any - // risks/accuracy loss?) - // This update is performed with a direct native query that - // is supposed to be quite fast. But still. - logger.info("Incrementing recorded storage use by " + confirmedFileSize + " bytes for dataset " + dataset.getId()); - // (@todo: need to consider what happens when this code is called on Create?) - storageUseService.incrementStorageSizeRecursively(dataset.getId(), confirmedFileSize); + totalBytesSaved += confirmedFileSize; } } @@ -425,12 +437,14 @@ public List saveAndAddFilesToDataset(DatasetVersion version, boolean metadataExtracted = false; boolean metadataExtractedFromNetcdf = false; if (tabIngest && FileUtil.canIngestAsTabular(dataFile)) { - /* - * Note that we don't try to ingest the file right away - instead we mark it as - * "scheduled for ingest", then at the end of the save process it will be queued - * for async. ingest in the background. In the meantime, the file will be - * ingested as a regular, non-tabular file, and appear as such to the user, - * until the ingest job is finished with the Ingest Service. + /** + * Note that we don't try to ingest the file right away + * - instead we mark it as "scheduled for ingest", then + * at the end of the save process it will be queued for + * async. ingest in the background. In the meantime, the + * file will be ingested as a regular, non-tabular file, + * and appear as such to the user, until the ingest job + * is finished with the Ingest Service. */ dataFile.SetIngestScheduled(); } else if (fileMetadataExtractable(dataFile)) { @@ -488,6 +502,10 @@ public List saveAndAddFilesToDataset(DatasetVersion version, // dataset.getGlobalId()); // Make sure the file is attached to the dataset and to the version, if this // hasn't been done yet: + // @todo: but shouldn't we be doing the reverse if we haven't been + // able to save the file? 
- disconnect it from the dataset and + // the version?? - L.A. 2023 + // (that said, is there *ever* a case where dataFile.getOwner() != null ?) if (dataFile.getOwner() == null) { dataFile.setOwner(dataset); @@ -503,8 +521,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, DataFileCategory dataFileCategory = dfcIt.next(); if (dataFileCategory.getDataset() == null) { - DataFileCategory newCategory = dataset - .getCategoryByName(dataFileCategory.getName()); + DataFileCategory newCategory = dataset.getCategoryByName(dataFileCategory.getName()); if (newCategory != null) { newCategory.addFileMetadata(dataFile.getFileMetadata()); // dataFileCategory = newCategory; @@ -516,10 +533,25 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } } } + + // Hmm. Noticing that the following two things - adding the + // files to the return list were being + // done outside of this "if (savedSuccess)" block. I'm pretty + // sure that was wrong. - L.A. 11-30-2023 + ret.add(dataFile); + // (unless that is that return value isn't used for anything - ?) } - ret.add(dataFile); } + // Update storage use for all the parent dvobjects: + logger.info("Incrementing recorded storage use by " + totalBytesSaved + " bytes for dataset " + dataset.getId()); + // Q. Need to consider what happens when this code is called on Create? + // A. It works on create as well, yes. (the recursive increment + // query in the method below does need the parent dataset to + // have the database id. But even if these files have been + // uploaded on the Create form, we first save the dataset, and + // then add the files to it. - L.A. + storageUseService.incrementStorageSizeRecursively(dataset.getId(), totalBytesSaved); } return ret; From dc567848bdfcc9647d0779c01bb57f93ab593d89 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 30 Nov 2023 19:10:43 -0500 Subject: [PATCH 0348/1112] making the set/delete quota commands superuser-only (doh). 
#8549 --- .../impl/DeleteCollectionQuotaCommand.java | 13 ++++++++++++- .../command/impl/SetCollectionQuotaCommand.java | 16 +++++++++++++--- src/main/java/propertyFiles/Bundle.properties | 1 + .../edu/harvard/iq/dataverse/api/FilesIT.java | 3 +++ 4 files changed, 29 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java index 5fcbad929a9..bdeb9c6e8cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java @@ -6,20 +6,25 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.storageuse.StorageQuota; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.logging.Logger; /** * * @author landreev + * + * A superuser-only command: */ -@RequiredPermissions(Permission.ManageDataversePermissions) +@RequiredPermissions({}) public class DeleteCollectionQuotaCommand extends AbstractVoidCommand { private static final Logger logger = Logger.getLogger(DeleteCollectionQuotaCommand.class.getCanonicalName()); @@ -33,6 +38,12 @@ public DeleteCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target) @Override public void executeImpl(CommandContext ctxt) throws CommandException { + // first check if user is a superuser + if ( (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser() ) ) { + throw new PermissionException(BundleUtil.getStringFromBundle("dataverse.storage.quota.superusersonly"), + this, null, targetDataverse); + } + if (targetDataverse == null) { throw new IllegalCommandException("", this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java index a134cbefdb9..6b0d1bf313a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -13,14 +14,18 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import 
edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.storageuse.StorageQuota; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.logging.Logger; /** * * @author landreev + * + * A superuser-only command: */ -@RequiredPermissions(Permission.ManageDataversePermissions) +@RequiredPermissions({}) public class SetCollectionQuotaCommand extends AbstractVoidCommand { private static final Logger logger = Logger.getLogger(GetCollectionQuotaCommand.class.getCanonicalName()); @@ -36,13 +41,18 @@ public SetCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target, Lo @Override public void executeImpl(CommandContext ctxt) throws CommandException { + // Check if user is a superuser: + if ( (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser() ) ) { + throw new PermissionException(BundleUtil.getStringFromBundle("dataverse.storage.quota.superusersonly"), + this, null, dataverse); + } if (dataverse == null) { - throw new IllegalCommandException("", this); + throw new IllegalCommandException("Must specify valid collection", this); } if (allocation == null) { - throw new IllegalCommandException("", this); + throw new IllegalCommandException("Must specify valid allocation in bytes", this); } StorageQuota storageQuota = dataverse.getStorageQuota(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 748b674a4e1..5033426175c 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -925,6 +925,7 @@ dataverse.storage.quota.allocation=Total quota allocation for this collection: { dataverse.storage.quota.notdefined=No quota defined for this collection dataverse.storage.quota.updated=Storage quota successfully set for the collection dataverse.storage.quota.deleted=Storage quota successfully disabled for the collection +dataverse.storage.quota.superusersonly=Only superusers can change storage quotas. dataverse.storage.use=Total recorded size of the files stored in this collection (user-uploaded files plus the versions in the archival tab-delimited format when applicable): {0} bytes dataverse.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataverse. Please report this error to the Dataverse administrator. 
dataverse.inherited=(inherited from enclosing Dataverse) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e391e17d8d5..915f82a6de2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2375,6 +2375,9 @@ public void testCollectionStorageQuotas() { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String username = UtilIT.getUsernameFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); From f4eee659021dfaab4dfa9c13e761b7c1875281c5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 30 Nov 2023 19:18:15 -0500 Subject: [PATCH 0349/1112] removing the license template stubs (#8549) --- .../engine/command/impl/DeleteCollectionQuotaCommand.java | 5 ----- .../engine/command/impl/GetCollectionStorageUseCommand.java | 4 ---- .../engine/command/impl/SetCollectionQuotaCommand.java | 6 ------ .../edu/harvard/iq/dataverse/storageuse/StorageQuota.java | 4 ---- .../edu/harvard/iq/dataverse/storageuse/StorageUse.java | 4 ---- .../iq/dataverse/storageuse/StorageUseServiceBean.java | 4 ---- .../iq/dataverse/storageuse/UploadSessionQuotaLimit.java | 4 ---- 7 files changed, 31 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java index bdeb9c6e8cb..4015228366b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java @@ -1,11 +1,6 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java index 40b3128b80d..c30a5a34a81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java index 6b0d1bf313a..cf8fb6fd42e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java @@ -1,13 +1,7 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java index 0cfebe4167a..d00f7041e61 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.storageuse; import edu.harvard.iq.dataverse.DvObject; diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java index 11a2a8b706c..240fba1037d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.storageuse; import edu.harvard.iq.dataverse.DvObject; diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java index e92ba43e950..b542a7cd661 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.storageuse; import edu.harvard.iq.dataverse.DvObjectContainer; diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java index 06bbe986f70..f7dac52e886 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click 
nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.harvard.iq.dataverse.storageuse; /** From 538921061604e4daacd864f8ec3865d6d0642561 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 1 Dec 2023 14:21:35 +0000 Subject: [PATCH 0350/1112] Stash: working on new canDownloadAtLeastOneFile Datasets API endpoint --- .../iq/dataverse/PermissionServiceBean.java | 8 ++++++ .../harvard/iq/dataverse/api/Datasets.java | 14 +++++++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 25 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++++ 4 files changed, 53 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index a1de33a764e..9e6628617ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -837,4 +837,12 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio return false; } + public boolean canDownloadAtLeastOneFile(User requestUser, DatasetVersion datasetVersion) { + for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) { + if (userOn(requestUser, fileMetadata.getDataFile()).has(Permission.DownloadFile)) { + return true; + } + } + return false; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index af6059cf882..a9cfefc33d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4134,4 +4134,18 @@ public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft)); return ok(jsonObjectBuilder); } + + @GET + @AuthRequired + @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile") + public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); + return ok(permissionService.canDownloadAtLeastOneFile(getRequestUser(crc), datasetVersion)); + }, getRequestUser(crc)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d20f1e8a58b..945b741a94b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -4121,4 +4121,29 @@ public void testGetUserPermissionsOnDataset() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + @Test + public void testGetCanDownloadAtLeastOneFile() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String 
dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Call with valid dataset id + Response canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, apiToken); + canDownloadAtLeastOneFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canDownloadAtLeastOneFile = JsonPath.from(canDownloadAtLeastOneFileResponse.body().asString()).getBoolean("data"); + assertTrue(canDownloadAtLeastOneFile); + + // Call with invalid dataset id + Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, apiToken); + getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9b264086c27..bf43733788a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3442,6 +3442,12 @@ static Response getUserPermissionsOnDataset(String datasetId, String apiToken) { .get("/api/datasets/" + datasetId + "/userPermissions"); } + static Response getCanDownloadAtLeastOneFile(String datasetId, String versionId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/versions/" + versionId + "/canDownloadAtLeastOneFile"); + } + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); jsonBuilder.add("dateAvailable", dateAvailable); From 8ec0984a663e4daa5b60049c1ee8d51004ca452c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 1 Dec 2023 09:26:39 -0500 Subject: [PATCH 0351/1112] add page on Jenkins #10101 --- doc/sphinx-guides/source/qa/index.md | 1 + doc/sphinx-guides/source/qa/jenkins.md | 44 ++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 doc/sphinx-guides/source/qa/jenkins.md diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md index 08deb7ee27d..6027f07574f 100644 --- a/doc/sphinx-guides/source/qa/index.md +++ b/doc/sphinx-guides/source/qa/index.md @@ -7,4 +7,5 @@ performance-tests.md manual-testing.md test-automation.md other-approaches.md +jenkins.md ``` diff --git a/doc/sphinx-guides/source/qa/jenkins.md b/doc/sphinx-guides/source/qa/jenkins.md new file mode 100644 index 00000000000..dbfec0d60d0 --- /dev/null +++ b/doc/sphinx-guides/source/qa/jenkins.md @@ -0,0 +1,44 @@ +# Jenkins + +```{contents} Contents: +:local: +:depth: 3 +``` + +## Introduction + +Jenkins is our primary tool for knowing if our API tests are passing. (Unit tests are executed locally by developers.) + +You can find our Jenkins installation at . + +Please note that while it has been open to the public in the past, it is currently firewalled off. We can poke a hole in the firewall for your IP address if necessary. Please get in touch. (You might also be interested in which is about restoring the ability of contributors to see if their pull requests are passing API tests or not.) 
+ +## Jobs + +Jenkins is organized into jobs. We'll highlight a few. + +### IQSS-dataverse-develop + +, which we will refer to as the "develop" job runs after pull requests are merged. It is crucial that this job stays green (passing) because we always want to stay in a "release ready" state. If you notice that this job is failing, make noise about it! + +You can get to this job from the README at . + +### IQSS-Dataverse-Develop-PR + + can be thought of as "PR jobs". It's a collection of jobs run on pull requests. Typically, you will navigate directly into the job (and it's particular build number) from a pull request. For example, from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). + +### guides.dataverse.org + + is what we use to build guides. See {doc}`/developers/making-releases` in the Developer Guide. + +## Checking if API Tests are Passing + +If API tests are failing, you should not merge the pull request. + +How can you know if API tests are passing? Here are the steps, by way of example. + +- From the pull request, navigate to the build. For example from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). +- You are now on the new "blue" interface for Jenkins. Click the button in the header called "go to classic" which should take you to (for example) . +- Click "Test Result". +- Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. +- Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. 
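The click-path above can also be checked from the command line via Jenkins' JSON API. The sketch below is illustrative only (it is not part of the jenkins.md page or of any patch in this series): it assumes the JUnit plugin's standard `testReport/api/json` endpoint is enabled, that you can reach the instance (it is currently firewalled), and it uses placeholder host and job values that you would replace with the real PR build.

```bash
#!/usr/bin/env bash
# Summarize a Jenkins build's test results without clicking through the UI.
# BUILD_URL is a placeholder -- substitute the classic URL of the PR build you are checking.
# Add '-u user:apitoken' to curl if the instance requires authentication.
BUILD_URL="https://jenkins.example.org/job/IQSS-Dataverse-Develop-PR/job/PR-1234/5"

# The JUnit plugin publishes pass/fail/skip counts and the overall duration as JSON.
# A 404 here, or a near-zero duration, usually means the API tests never actually ran.
curl -fsS "${BUILD_URL}/testReport/api/json?tree=passCount,failCount,skipCount,duration" \
  | jq '{passed: .passCount, failed: .failCount, skipped: .skipCount, duration: .duration}'
```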
From f48f3a84a72b212d66a4bae1c1056e31dc8f7e52 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 1 Dec 2023 14:50:40 +0000 Subject: [PATCH 0352/1112] Fixed: DatasetVersionFilesServiceBean order by condition for type criteria --- .../DatasetVersionFilesServiceBean.java | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 78fd896c897..99c3c65e3b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -260,22 +260,27 @@ private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion, return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); } - private Order createGetFileMetadatasOrder(CriteriaBuilder criteriaBuilder, - FileOrderCriteria orderCriteria, - Root fileMetadataRoot) { + private List createGetFileMetadatasOrder(CriteriaBuilder criteriaBuilder, + FileOrderCriteria orderCriteria, + Root fileMetadataRoot) { Path label = fileMetadataRoot.get("label"); Path dataFile = fileMetadataRoot.get("dataFile"); Path publicationDate = dataFile.get("publicationDate"); Path createDate = dataFile.get("createDate"); Expression orderByLifetimeExpression = criteriaBuilder.selectCase().when(publicationDate.isNotNull(), publicationDate).otherwise(createDate); - return switch (orderCriteria) { - case NameZA -> criteriaBuilder.desc(label); - case Newest -> criteriaBuilder.desc(orderByLifetimeExpression); - case Oldest -> criteriaBuilder.asc(orderByLifetimeExpression); - case Size -> criteriaBuilder.asc(dataFile.get("filesize")); - case Type -> criteriaBuilder.asc(dataFile.get("contentType")); - default -> criteriaBuilder.asc(label); - }; + List orderList = new ArrayList<>(); + switch (orderCriteria) { + case NameZA -> orderList.add(criteriaBuilder.desc(label)); + case Newest -> orderList.add(criteriaBuilder.desc(orderByLifetimeExpression)); + case Oldest -> orderList.add(criteriaBuilder.asc(orderByLifetimeExpression)); + case Size -> orderList.add(criteriaBuilder.asc(dataFile.get("filesize"))); + case Type -> { + orderList.add(criteriaBuilder.asc(dataFile.get("contentType"))); + orderList.add(criteriaBuilder.asc(label)); + } + default -> orderList.add(criteriaBuilder.asc(label)); + } + return orderList; } private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { From a142ac82e7315370755f11245c38f388f7580b12 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 1 Dec 2023 12:51:55 -0500 Subject: [PATCH 0353/1112] Adds description about the "go to classic" button --- doc/sphinx-guides/source/qa/jenkins.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/qa/jenkins.md b/doc/sphinx-guides/source/qa/jenkins.md index dbfec0d60d0..a4ca4d8688f 100644 --- a/doc/sphinx-guides/source/qa/jenkins.md +++ b/doc/sphinx-guides/source/qa/jenkins.md @@ -38,7 +38,7 @@ If API tests are failing, you should not merge the pull request. How can you know if API tests are passing? Here are the steps, by way of example. - From the pull request, navigate to the build. For example from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). -- You are now on the new "blue" interface for Jenkins. 
Click the button in the header called "go to classic" which should take you to (for example) . +- You are now on the new "blue" interface for Jenkins. Click the button with an arrow on the right side of the header called "go to classic" which should take you to (for example) . - Click "Test Result". - Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. - Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. From a29942bf4c8c78d7dee34d61fbb73f44b8ec699e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:08:26 -0500 Subject: [PATCH 0354/1112] add files not accessible by dataverse flag --- .../dataaccess/AbstractRemoteOverlayAccessIO.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java index 9de6bf69832..16defc26a4f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java @@ -48,6 +48,11 @@ public abstract class AbstractRemoteOverlayAccessIO extends static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes"; protected static final String REMOTE_STORE_NAME = "remote-store-name"; protected static final String REMOTE_STORE_URL = "remote-store-url"; + + // Whether Dataverse can access the file bytes + //Currently True for the Globus store when using the S3Connector, and Remote Stores like simple web servers where the URLs resolve to the actual file bits + static final String FILES_NOT_ACCESSIBLE_BY_DATAVERSE = "files-not-accessible-by-dataverse"; + protected StorageIO baseStore = null; protected String path = null; protected PoolingHttpClientConnectionManager cm = null; @@ -329,6 +334,10 @@ protected String getStoragePath() throws IOException { logger.fine("fullStoragePath: " + fullStoragePath); return fullStoragePath; } + + public static boolean isNotDataverseAccessible(String storeId) { + return Boolean.parseBoolean(StorageIO.getConfigParamForDriver(storeId, FILES_NOT_ACCESSIBLE_BY_DATAVERSE)); + } From 0d758398b64521e65c0d0d90d963aeb7b01af42d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:09:03 -0500 Subject: [PATCH 0355/1112] add Globus store to the normal file upload (as for the remote store) --- .../java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index 4a4d3f57f83..a1bcbe49327 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -263,7 +263,8 @@ public static StorageIO createNewStorageIO(T dvObject, S storageIO = new S3AccessIO<>(dvObject, null, storageDriverId); break; case REMOTE: - storageIO = createNewStorageIO(dvObject, storageTag, RemoteOverlayAccessIO.getBaseStoreIdFor(storageDriverId)) ; + case GLOBUS: + storageIO = createNewStorageIO(dvObject, storageTag, AbstractRemoteOverlayAccessIO.getBaseStoreIdFor(storageDriverId)) ; break; default: logger.warning("Could not find storage driver for: " + storageTag); From 
ce8bb6e97ff776777b642ceafb3c1fb7bae6129f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:10:28 -0500 Subject: [PATCH 0356/1112] add Globus as a download option in file table header requires changes to startGlobusTransfer in separate commit --- src/main/webapp/dataset.xhtml | 2 +- src/main/webapp/filesFragment.xhtml | 21 ++++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 2f76197e508..0b8983a7770 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -230,7 +230,7 @@
  • - +
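Note that the dataset.xhtml change above and the filesFragment.xhtml hunks that follow only take effect when at least one file in the version sits on a store of type "globus" and Globus download is enabled. As a rough, hedged sketch of what declaring such a store could look like for local testing, the lines below mirror the System.setProperty style used in this series' own unit-test setup; the store id "globusdemo", the label, and the endpoint UUID/basepath are placeholders, and a production deployment would set the corresponding dataverse.files.* options as JVM/MicroProfile settings rather than system properties.

import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;

// Sketch of a test-only store configuration; all concrete values are placeholders.
public class GlobusDemoStoreConfigSketch {
    public static void configure() {
        System.setProperty("dataverse.files.globusdemo.type", "globus");
        System.setProperty("dataverse.files.globusdemo.label", "Globus Demo");
        // Whether the Globus endpoint is managed by this Dataverse installation.
        System.setProperty("dataverse.files.globusdemo.managed", "true");
        // Overlay stores keep auxiliary objects (thumbnails, exports) in a separate base store.
        System.setProperty("dataverse.files.globusdemo.base-store", "file");
        System.setProperty("dataverse.files.globusdemo.remote-store-name", "Demo Globus Endpoint");
        // Endpoint id and base path are placeholders; the parameter name comes from GlobusAccessibleStore.
        System.setProperty("dataverse.files.globusdemo." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH,
                "00000000-0000-0000-0000-000000000000/basepath");
        // Set when Dataverse itself cannot read the file bytes (no S3Connector behind the endpoint);
        // this is the flag AbstractRemoteOverlayAccessIO.isNotDataverseAccessible() checks.
        System.setProperty("dataverse.files.globusdemo.files-not-accessible-by-dataverse", "true");
    }
}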
  • diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index fbc48a0e884..3d28e3170f7 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -436,7 +436,7 @@
    + and !(DatasetPage.isVersionHasTabular()||DatasetPage.isVersionHasGlobus())}"> #{bundle.download}
    -
    + and (DatasetPage.isVersionHasTabular()||DatasetPage.isVersionHasGlobus())}">
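The next commit wires these menu entries to per-file checks in DatasetPage. As a condensed illustration of the decision rule the last few commits build up, the sketch below combines the helpers introduced earlier in this series; the class and method names here are hypothetical, and the real page bean additionally consults systemConfig.isGlobusDownload() and the user's download permission.

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
import edu.harvard.iq.dataverse.dataaccess.DataAccess;
import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;

// Hypothetical helper: classifies a file for the UI based on its storage driver.
public class GlobusFileAccessSketch {

    // Shown under "Download" only if Dataverse can stream the bytes itself.
    public static boolean isDirectlyDownloadable(DataFile df) {
        String driverId = DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier());
        return !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId);
    }

    // Shown under "Globus Transfer" only if the file sits on a store of type "globus".
    public static boolean isGlobusTransferable(DataFile df) {
        String driverId = DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier());
        return GlobusAccessibleStore.isGlobusAccessible(driverId);
    }
}

A file on a Globus store backed by the S3Connector can satisfy both checks, which is why the later DatasetPage changes track downloadable and Globus-transferable selections as separate, overlapping lists.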
    From 8e75a3e2f501b3f0e09fbc9cba9041c52f769737 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:11:56 -0500 Subject: [PATCH 0357/1112] Add logic for Globus transfer of some files --- .../edu/harvard/iq/dataverse/DatasetPage.java | 112 +++++++++++++----- 1 file changed, 81 insertions(+), 31 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index a663b8588ad..0b0d0a2e4f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -11,6 +11,9 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; @@ -361,6 +364,7 @@ public void setSelectedHostDataverse(Dataverse selectedHostDataverse) { * other boolean. */ private boolean versionHasTabular = false; + private boolean versionHasGlobus = false; private boolean showIngestSuccess; @@ -2183,10 +2187,19 @@ private String init(boolean initFull) { // the total "originals" size of the dataset with direct custom queries; // then we'll be able to drop the lookup hint for DataTable from the // findDeep() method for the version and further speed up the lookup - // a little bit. + // a little bit. + boolean globusDownloadEnabled = systemConfig.isGlobusDownload(); for (FileMetadata fmd : workingVersion.getFileMetadatas()) { - if (fmd.getDataFile().isTabularData()) { + DataFile df = fmd.getDataFile(); + if (df.isTabularData()) { versionHasTabular = true; + } + if(globusDownloadEnabled) { + if(GlobusAccessibleStore.isGlobusAccessible(DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier()))) { + versionHasGlobus= true; + } + } + if(versionHasTabular &&(!globusDownloadEnabled || versionHasGlobus)) { break; } } @@ -2483,6 +2496,10 @@ private DefaultTreeNode createFileTreeNode(FileMetadata fileMetadata, TreeNode p public boolean isVersionHasTabular() { return versionHasTabular; } + + public boolean isVersionHasGlobus() { + return versionHasGlobus; + } public boolean isReadOnly() { return readOnly; @@ -3089,6 +3106,16 @@ public void setSelectedNonDownloadableFiles(List selectedNonDownlo this.selectedNonDownloadableFiles = selectedNonDownloadableFiles; } + private List selectedGlobusTransferableFiles; + + public List getSelectedGlobusTransferableFiles() { + return selectedGlobusTransferableFiles; + } + + public void setSelectedGlobusTransferableFiles(List selectedGlobusTransferableFiles) { + this.selectedGlobusTransferableFiles = selectedGlobusTransferableFiles; + } + public String getSizeOfDataset() { return DatasetUtil.getDownloadSize(workingVersion, false); } @@ -3247,8 +3274,8 @@ public boolean validateFilesForDownload(boolean downloadOriginal){ } } - //if there are two or more files with a total size - //over the zip limit post a "too large" popup + //if there are two or more files, with a total size + //over the zip limit, post a "too large" popup if (bytes > settingsWrapper.getZipDownloadLimit() && selectedDownloadableFiles.size() > 1) { 
setValidateFilesOutcome("FailSize"); return false; @@ -3257,16 +3284,17 @@ public boolean validateFilesForDownload(boolean downloadOriginal){ // If some of the files were restricted and we had to drop them off the // list, and NONE of the files are left on the downloadable list // - we show them a "you're out of luck" popup: - if (getSelectedDownloadableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) { + if (getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) { setValidateFilesOutcome("FailRestricted"); return false; } - if (!getSelectedDownloadableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) { + if (!(getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty()) + && !getSelectedNonDownloadableFiles().isEmpty()) { setValidateFilesOutcome("Mixed"); return true; } - + //ToDo - should Mixed not trigger this? if (isTermsPopupRequired() || isGuestbookPopupRequiredAtDownload()) { setValidateFilesOutcome("GuestbookRequired"); } @@ -3302,12 +3330,25 @@ private boolean filterSelectedFiles(){ setSelectedNonDownloadableFiles(new ArrayList<>()); setSelectedRestrictedFiles(new ArrayList<>()); setSelectedUnrestrictedFiles(new ArrayList<>()); + setSelectedGlobusTransferableFiles(new ArrayList<>()); boolean someFiles = false; + boolean globusDownloadEnabled = systemConfig.isGlobusDownload(); for (FileMetadata fmd : this.selectedFiles){ - if(this.fileDownloadHelper.canDownloadFile(fmd)){ + boolean downloadable=this.fileDownloadHelper.canDownloadFile(fmd); + + boolean globusTransferable = false; + if(globusDownloadEnabled) { + String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier()); + globusTransferable = GlobusAccessibleStore.isGlobusAccessible(driverId); + downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId); + } + if(downloadable){ getSelectedDownloadableFiles().add(fmd); someFiles=true; + } else if(globusTransferable) { + getSelectedGlobusTransferableFiles().add(fmd); + someFiles=true; } else { getSelectedNonDownloadableFiles().add(fmd); } @@ -5247,7 +5288,7 @@ public boolean isFileAccessRequestMultiButtonEnabled(){ } return false; } - +/* These appear to be unused - toDo - delete private Boolean downloadButtonAllEnabled = null; public boolean isDownloadAllButtonEnabled() { @@ -5276,7 +5317,7 @@ public boolean isDownloadSelectedButtonEnabled(){ } return false; } - +*/ public boolean isFileAccessRequestMultiSignUpButtonRequired(){ if (isSessionUserAuthenticated()){ return false; @@ -6277,28 +6318,37 @@ public boolean isHasPublicStore() { return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(dataset.getEffectiveStorageDriverId())); } - public void startGlobusTransfer() { - ApiToken apiToken = null; - User user = session.getUser(); - if (user instanceof AuthenticatedUser) { - apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); - } else if (user instanceof PrivateUrlUser) { - PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; - PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); - apiToken = new ApiToken(); - apiToken.setTokenString(privUrl.getToken()); - } - if(fileMetadataForAction!=null) { - List downloadFMList = new ArrayList(1); - downloadFMList.add(fileMetadataForAction); - 
PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, downloadFMList)); - } else { - if(getSelectedDownloadableFiles()!=null) { - PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, getSelectedDownloadableFiles())); + public void startGlobusTransfer(boolean transferAll) { + if(transferAll) { + this.setSelectedFiles(workingVersion.getFileMetadatas()); + } + boolean validated = validateFilesForDownload(true); + if (validated) { + ApiToken apiToken = null; + User user = session.getUser(); + if (user instanceof AuthenticatedUser) { + apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); + } else if (user instanceof PrivateUrlUser) { + PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; + PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); + apiToken = new ApiToken(); + apiToken.setTokenString(privUrl.getToken()); + } + if (fileMetadataForAction != null) { + List downloadFMList = new ArrayList(1); + downloadFMList.add(fileMetadataForAction); + PrimeFaces.current() + .executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, downloadFMList)); } else { - //ToDo: For non-public, need the subset that are downloadable by the user - //ToDo: For mixed (some in backing store), need the ones in the globus store - PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, workingVersion.getFileMetadatas())); + if (getSelectedGlobusTransferableFiles() != null) { + PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, + getSelectedGlobusTransferableFiles())); + } else { + // ToDo: For non-public, need the subset that are downloadable by the user + // ToDo: For mixed (some in backing store), need the ones in the globus store + PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, + workingVersion.getFileMetadatas())); + } } } } From 0e91e6ae59020991513add7e14e09c69641ee71e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:12:20 -0500 Subject: [PATCH 0358/1112] Convenience method to get store id for a file --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index df0c3e5a019..776d04e98cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1797,5 +1797,11 @@ public static boolean isActivelyEmbargoed(List fmdList) { } return false; } + + + public static String getStorageDriver(DataFile dataFile) { + String storageIdentifier = dataFile.getStorageIdentifier(); + return storageIdentifier.substring(0, storageIdentifier.indexOf(DataAccess.SEPARATOR)); + } } From e5bf3001e39bf8362f9025e85cf3f6626baf15d0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:14:41 -0500 Subject: [PATCH 0359/1112] skip inaccessible files when doing validatation --- .../command/impl/FinalizeDatasetPublicationCommand.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 3da087addd9..89cfc732455 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -32,15 +32,13 @@ import java.util.logging.Logger; import edu.harvard.iq.dataverse.GlobalIdServiceBean; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.util.FileUtil; import java.util.ArrayList; import java.util.concurrent.Future; import org.apache.solr.client.solrj.SolrServerException; -import jakarta.ejb.EJB; -import jakarta.inject.Inject; - /** * @@ -350,7 +348,8 @@ private void validateDataFiles(Dataset dataset, CommandContext ctxt) throws Comm // (the decision was made to validate all the files on every // major release; we can revisit the decision if there's any // indication that this makes publishing take significantly longer. - if (maxFileSize == -1 || dataFile.getFilesize() < maxFileSize) { + String driverId = FileUtil.getStorageDriver(dataFile); + if(StorageIO.isDataverseAccessible(driverId) && maxFileSize == -1 || dataFile.getFilesize() < maxFileSize) { FileUtil.validateDataFileChecksum(dataFile); } else { From 534c99bb0376aeaa25f2d9d54cbe68a8bfb3b6bc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:15:23 -0500 Subject: [PATCH 0360/1112] Convenience method re: store supports globus access --- .../iq/dataverse/dataaccess/GlobusAccessibleStore.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java index aad1dab5eab..d827e40e807 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java @@ -58,4 +58,11 @@ public static String getGlobusToken(String storeId) { return StorageIO.getConfigParamForDriver(storeId, GLOBUS_TOKEN); } + public static boolean isGlobusAccessible(String storeId) { + if(StorageIO.getConfigParamForDriver(storeId, StorageIO.TYPE).equals(DataAccess.GLOBUS)) { + return true; + } + return false; + } + } From ca1a4f1267b2d52cd38054cca61fbddf6941522b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:16:12 -0500 Subject: [PATCH 0361/1112] Update to use new isNotDataverseAccessible method in getInputStream --- .../iq/dataverse/dataaccess/GlobusOverlayAccessIO.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 7ec1e2f9e73..3e72fa85d35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -232,7 +232,9 @@ public long retrieveSizeFromMedia() { @Override public InputStream getInputStream() throws IOException { - if(Boolean.parseBoolean(getConfigParam("endpoint-maps-to-base-store"))) { + //Currently only supported when using an S3 store with the Globus S3Connector. 
+ //ToDo: Support when using a managed Globus endpoint that supports http access + if(!AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(endpoint)) { return baseStore.getInputStream(); } else { throw new IOException("Not implemented"); From f39fa0715e81aafefd14c92c50171eb436a45491 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:17:03 -0500 Subject: [PATCH 0362/1112] Convenience method isDataverseAccessible --- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 14fc9254c59..51cdecf64a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -57,6 +57,8 @@ public abstract class StorageIO { static final String UPLOAD_REDIRECT = "upload-redirect"; static final String UPLOAD_OUT_OF_BAND = "upload-out-of-band"; protected static final String DOWNLOAD_REDIRECT = "download-redirect"; + protected static final String DATAVERSE_INACCESSIBLE = "dataverse-inaccessible"; + public StorageIO() { @@ -620,6 +622,11 @@ public static boolean isDirectUploadEnabled(String driverId) { || Boolean.parseBoolean(getConfigParamForDriver(driverId, UPLOAD_OUT_OF_BAND)); } + //True by default, Stores (e.g. RemoteOverlay, Globus) can set this false to stop attempts to read bytes + public static boolean isDataverseAccessible(String driverId) { + return (true && !Boolean.parseBoolean(getConfigParamForDriver(driverId, DATAVERSE_INACCESSIBLE))); + } + // Check that storageIdentifier is consistent with store's config // False will prevent direct uploads static boolean isValidIdentifier(String driverId, String storageId) { From dc4580232dcfe698010cdc4c20fb77c19482484b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 14:18:05 -0500 Subject: [PATCH 0363/1112] use correct term (though up and down terms are the same) could also fix for native/http, but not for rsync --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index e40f55fedd8..3c6992f8ec3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -941,7 +941,7 @@ public boolean isHTTPDownload() { } public boolean isGlobusDownload() { - return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); + return getMethodAvailable(FileDownloadMethods.GLOBUS.toString(), false); } public boolean isGlobusFileDownload() { From 0bfbb10c355ea1ebc24d2d8bee928c50ca22db41 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 1 Dec 2023 16:59:38 -0500 Subject: [PATCH 0364/1112] "manage collections" guide entry. 
#8549 --- .../source/admin/collectionquotas.rst | 17 +++++++++++++++++ doc/sphinx-guides/source/admin/index.rst | 1 + doc/sphinx-guides/source/api/native-api.rst | 12 +++++++++++- .../iq/dataverse/storageuse/StorageUse.java | 3 +++ 4 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 doc/sphinx-guides/source/admin/collectionquotas.rst diff --git a/doc/sphinx-guides/source/admin/collectionquotas.rst b/doc/sphinx-guides/source/admin/collectionquotas.rst new file mode 100644 index 00000000000..883b6cf0c93 --- /dev/null +++ b/doc/sphinx-guides/source/admin/collectionquotas.rst @@ -0,0 +1,17 @@ +Storage Quotas for Collections +============================== + +Please note that this is a new and still experimental feature (as of Dataverse v6.1 release). + +Instance admins can now define storage quota limits for specific collections. These limits can be set, changed and/or deleted via the provided APIs (please see the :ref:`collection-storage-quotas` section of the :doc:`/api/native-api` guide). The Read version of the API is available to the individual collection admins (i.e., a collection owner can check on the quota configured for their collection), but only superusers can set, change or disable storage quotas. + +Storage quotas are *inherited* by subcollections. In other words, when storage use limit is set for a specific collection, it applies to all the datasets immediately under it and in its sub-collections, unless different quotas are defined there and so on. Each file added to any dataset in that hierarchy counts for the purposes of the quota limit defined for the top collection. A storage quota defined on a child sub-collection overrides whatever quota that may be defined on the parent, or inherited from an ancestor. + +For example, a collection ``A`` has the storage quota set to 10GB. It has 3 sub-collections, ``B``, ``C`` and ``D``. Users can keep uploading files into the datasets anywhere in this hierarchy until the combined size of 10GB is reached between them. However, if an admin has reasons to limit one of the sub-collections, ``B`` to 3GB only, that quota can be explicitly set there. This both limits the growth of ``B`` to 3GB, and also *guarantees* that allocation to it. I.e. the contributors to collection ``B`` will be able to keep adding data until the 3GB limit is reached, even after the parent collection ``A`` reaches the combined 10GB limit (at which point ``A`` and all its subcollections except for ``B`` will become read-only). + +We do not yet know whether this is going to be a popular, or needed use case - a child collection quota that is different from the quota it inherits from a parent. It is likely that for many instances it will be sufficient to be able to define quotas for collections and have them apply to all the child objects underneath. We will examine the response to this feature and consider making adjustments to this scheme based on it. We are already considering introducing other types of quotas, such as limits by users or specific storage volumes. + +Please note that only the sizes of the main datafiles and the archival tab-delimited format versions, as produced by the ingest process are counted for the purposes of enforcing the limits. Automatically generated "auxiliary" files, such as rescaled image thumbnails and metadata exports for datasets are not. + +When quotas are set and enforced, the users will be informed of the remaining storage allocation on the file upload page together with other upload and processing limits. 
+ diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst index ac81aa737a7..633842044b4 100755 --- a/doc/sphinx-guides/source/admin/index.rst +++ b/doc/sphinx-guides/source/admin/index.rst @@ -27,6 +27,7 @@ This guide documents the functionality only available to superusers (such as "da solr-search-index ip-groups mail-groups + collectionquotas monitoring reporting-tools-and-queries maintenance diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 234d5f37232..7bd334f6a95 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -763,7 +763,8 @@ Collection Storage Quotas curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota" -Will output the storage quota allocated (in bytes), or a message indicating that the quota is not defined for the collection. +Will output the storage quota allocated (in bytes), or a message indicating that the quota is not defined for the specific collection. The user identified by the API token must have the ``Manage`` permission on the collection. + To set or change the storage allocation quota for a collection: .. code-block:: curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota/$SIZE_IN_BYTES" +This API is superuser-only. + + To delete a storage quota configured for a collection: .. code-block:: curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota" +This API is superuser-only. + +Use the ``/settings`` API to enable or disable the enforcement of storage quotas that are defined across the instance via the following setting. For example, + +..
code-block:: + curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:UseStorageQuotas Datasets diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java index 240fba1037d..b777736dc8d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java @@ -7,10 +7,12 @@ import jakarta.persistence.GenerationType; import jakarta.persistence.GeneratedValue; import jakarta.persistence.Id; +import jakarta.persistence.Index; import jakarta.persistence.JoinColumn; import jakarta.persistence.NamedQueries; import jakarta.persistence.NamedQuery; import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; import java.io.Serializable; /** @@ -23,6 +25,7 @@ @NamedQuery(name = "StorageUse.incrementByteSizeByDvContainerId", query = "UPDATE StorageUse su SET su.sizeInBytes = su.sizeInBytes +:fileSize WHERE su.dvObjectContainer.id =:dvObjectId") }) @Entity +@Table(indexes = {@Index(columnList="dvobjectcontainer_id")}) public class StorageUse implements Serializable { private static final long serialVersionUID = 1L; From 9af23d23d97413338ce2b800697b19970aca3dd5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 1 Dec 2023 17:23:09 -0500 Subject: [PATCH 0365/1112] add mixed/other dialogs for transfer case --- .../edu/harvard/iq/dataverse/DatasetPage.java | 92 ++++++++++++------- src/main/java/propertyFiles/Bundle.properties | 6 +- src/main/webapp/dataset.xhtml | 48 ++++++++-- src/main/webapp/filesFragment.xhtml | 10 +- 4 files changed, 110 insertions(+), 46 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 0b0d0a2e4f5..47a32987b0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -365,6 +365,7 @@ public void setSelectedHostDataverse(Dataverse selectedHostDataverse) { */ private boolean versionHasTabular = false; private boolean versionHasGlobus = false; + private boolean globusTransferRequested = false; private boolean showIngestSuccess; @@ -3116,6 +3117,16 @@ public void setSelectedGlobusTransferableFiles(List selectedGlobus this.selectedGlobusTransferableFiles = selectedGlobusTransferableFiles; } + private List selectedNonGlobusTransferableFiles; + + public List getSelectedNonGlobusTransferableFiles() { + return selectedNonGlobusTransferableFiles; + } + + public void setSelectedNonGlobusTransferableFiles(List selectedNonGlobusTransferableFiles) { + this.selectedNonGlobusTransferableFiles = selectedNonGlobusTransferableFiles; + } + public String getSizeOfDataset() { return DatasetUtil.getDownloadSize(workingVersion, false); } @@ -3227,7 +3238,7 @@ private void startDownload(boolean downloadOriginal){ boolean guestbookRequired = isDownloadPopupRequired(); boolean validate = validateFilesForDownload(downloadOriginal); if (validate) { - updateGuestbookResponse(guestbookRequired, downloadOriginal); + updateGuestbookResponse(guestbookRequired, downloadOriginal, false); if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){ startMultipleFileDownload(); } @@ -3289,8 +3300,9 @@ public boolean validateFilesForDownload(boolean downloadOriginal){ return false; } - if (!(getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty()) - && !getSelectedNonDownloadableFiles().isEmpty()) { + //Some are selected and 
there are non-downloadable ones or there are both downloadable and globus transferable files + if ((!(getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty()) + && (!getSelectedNonDownloadableFiles().isEmpty()) || (!getSelectedDownloadableFiles().isEmpty() && !getSelectedGlobusTransferableFiles().isEmpty()))) { setValidateFilesOutcome("Mixed"); return true; } @@ -3302,7 +3314,7 @@ public boolean validateFilesForDownload(boolean downloadOriginal){ } - private void updateGuestbookResponse (boolean guestbookRequired, boolean downloadOriginal) { + private void updateGuestbookResponse (boolean guestbookRequired, boolean downloadOriginal, boolean isGlobusTransfer) { // Note that the GuestbookResponse object may still have information from // the last download action performed by the user. For example, it may // still have the non-null Datafile in it, if the user has just downloaded @@ -3310,7 +3322,11 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa // even if that's not what they are trying to do now. // So make sure to reset these values: guestbookResponse.setDataFile(null); - guestbookResponse.setSelectedFileIds(getSelectedDownloadableFilesIdsString()); + if(isGlobusTransfer) { + guestbookResponse.setSelectedFileIds(getFilesIdsString(getSelectedGlobusTransferableFiles())); + } else { + guestbookResponse.setSelectedFileIds(getSelectedDownloadableFilesIdsString()); + } if (downloadOriginal) { guestbookResponse.setFileFormat("original"); } else { @@ -3331,6 +3347,7 @@ private boolean filterSelectedFiles(){ setSelectedRestrictedFiles(new ArrayList<>()); setSelectedUnrestrictedFiles(new ArrayList<>()); setSelectedGlobusTransferableFiles(new ArrayList<>()); + setSelectedNonGlobusTransferableFiles(new ArrayList<>()); boolean someFiles = false; boolean globusDownloadEnabled = systemConfig.isGlobusDownload(); @@ -3346,11 +3363,14 @@ private boolean filterSelectedFiles(){ if(downloadable){ getSelectedDownloadableFiles().add(fmd); someFiles=true; - } else if(globusTransferable) { + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(globusTransferable) { getSelectedGlobusTransferableFiles().add(fmd); someFiles=true; } else { - getSelectedNonDownloadableFiles().add(fmd); + getSelectedNonGlobusTransferableFiles().add(fmd); } if(fmd.isRestricted()){ getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not @@ -6318,37 +6338,45 @@ public boolean isHasPublicStore() { return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(dataset.getEffectiveStorageDriverId())); } - public void startGlobusTransfer(boolean transferAll) { - if(transferAll) { + public boolean isGlobusTransferRequested() { + return globusTransferRequested; + } + + public void startGlobusTransfer(boolean transferAll, boolean popupShown) { + if (transferAll) { this.setSelectedFiles(workingVersion.getFileMetadatas()); } + boolean guestbookRequired = isDownloadPopupRequired(); + boolean validated = validateFilesForDownload(true); if (validated) { - ApiToken apiToken = null; - User user = session.getUser(); - if (user instanceof AuthenticatedUser) { - apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); - } else if (user instanceof PrivateUrlUser) { - PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; - PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); - apiToken = new ApiToken(); - apiToken.setTokenString(privUrl.getToken()); - } - if 
(fileMetadataForAction != null) { - List downloadFMList = new ArrayList(1); - downloadFMList.add(fileMetadataForAction); - PrimeFaces.current() - .executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, downloadFMList)); - } else { - if (getSelectedGlobusTransferableFiles() != null) { - PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, - getSelectedGlobusTransferableFiles())); + globusTransferRequested = true; + boolean mixed = "Mixed".equals(getValidateFilesOutcome()); + // transfer is + updateGuestbookResponse(guestbookRequired, true, true); + if ((!guestbookRequired && !mixed) || popupShown) { + ApiToken apiToken = null; + User user = session.getUser(); + if (user instanceof AuthenticatedUser) { + apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); + } else if (user instanceof PrivateUrlUser) { + PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; + PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); + apiToken = new ApiToken(); + apiToken.setTokenString(privUrl.getToken()); + } + if (fileMetadataForAction != null) { + List downloadFMList = new ArrayList(1); + downloadFMList.add(fileMetadataForAction); + PrimeFaces.current() + .executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, downloadFMList)); } else { - // ToDo: For non-public, need the subset that are downloadable by the user - // ToDo: For mixed (some in backing store), need the ones in the globus store - PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, - workingVersion.getFileMetadatas())); + if (getSelectedGlobusTransferableFiles() != null) { + PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, + getSelectedGlobusTransferableFiles())); + } } + globusTransferRequested = false; } } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 13e3a675a27..65dd020f27b 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -64,6 +64,7 @@ manager=Manager curator=Curator explore=Explore download=Download +transfer=Globus Transfer downloadOriginal=Original Format downloadArchival=Archival Format (.tab) deaccession=Deaccession @@ -1391,6 +1392,7 @@ dataset.accessBtn.header.explore=Explore Options dataset.accessBtn.header.configure=Configure Options dataset.accessBtn.header.compute=Compute Options dataset.accessBtn.download.size=ZIP ({0}) +dataset.accessBtn.transfer.size=({0}) dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table. dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table. dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table. @@ -1655,8 +1657,10 @@ dataset.inValidSelectedFilesForDownloadWithEmbargo=Embargoed and/or Restricted F dataset.noValidSelectedFilesForDownload=The selected file(s) may not be downloaded because you have not been granted access. dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. dataset.mixedSelectedFilesForDownloadWithEmbargo=The embargoed and/or restricted file(s) selected may not be downloaded because you have not been granted access. 
- +dataset.mixedSelectedFilesForTransfer=Some file(s) cannot be transferred. (They are restricted, embargoed, or not Globus accessible.) +dataset.inValidSelectedFilesForTransfer=Ineligible Files Selected dataset.downloadUnrestricted=Click Continue to download the files you have access to download. +dataset.transferUnrestricted=Click Continue to transfer the eligible files. dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 0b8983a7770..e50e68ec162 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -178,7 +178,7 @@
  • + oncomplete="showPopup(false);"> #{bundle.download} @@ -192,7 +192,7 @@
  • #{bundle.downloadOriginal} @@ -208,7 +208,7 @@
  • - #{bundle.downloadArchival} @@ -230,9 +230,14 @@
  • - - - + + #{bundle.transfer} + + + + +
  • @@ -1095,6 +1100,28 @@ + +

    #{bundle['dataset.mixedSelectedFilesForTransfer']}

    + + + + + + +
    #{resFile.label}
    +
    +

    #{bundle['dataset.transferUnrestricted']}

    + + + +
    +

    #{bundle['file.deleteDialog.tip']}

    @@ -1545,6 +1572,7 @@ + @@ -1911,10 +1939,14 @@ $('button[id$="updateOwnerDataverse"]').trigger('click'); } - function showPopup() { + function showPopup(isTransfer) { var outcome = document.getElementById("datasetForm:validateFilesOutcome").value; if (outcome ==='Mixed'){ - PF('downloadMixed').show(); + if(isTransfer) { + PF('globusTransferMixed').show(); + } else { + PF('downloadMixed').show(); + } } if (outcome ==='FailEmpty'){ PF('selectFilesForDownload').show(); diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 3d28e3170f7..58899ab7062 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -442,7 +442,7 @@ disabled="#{false and DatasetPage.lockedFromDownload}" onclick="if (!testFilesSelected()) return false;" action="#{DatasetPage.startDownloadSelectedOriginal()}" - update="@form" oncomplete="showPopup();"> + update="@form" oncomplete="showPopup(false);"> #{bundle.download} @@ -459,7 +459,7 @@
  • @@ -470,7 +470,7 @@
  • @@ -481,9 +481,9 @@
  • + actionListener="#{DatasetPage.startGlobusTransfer(false, false)}"> #{bundle['file.globus.transfer']} From 43105d31ae3d5357e450da3a98cac6886e18a1d3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Sat, 2 Dec 2023 13:14:28 -0500 Subject: [PATCH 0366/1112] refactor, handle guestbook at download case --- .../edu/harvard/iq/dataverse/DatasetPage.java | 29 ++----- .../iq/dataverse/FileDownloadHelper.java | 36 ++++---- .../iq/dataverse/GuestbookResponse.java | 2 +- .../dataverse/api/DownloadInstanceWriter.java | 6 +- .../dataverse/globus/GlobusServiceBean.java | 86 ++++++++++++++++--- .../guestbook-terms-popup-fragment.xhtml | 13 ++- 6 files changed, 115 insertions(+), 57 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 47a32987b0b..830e146fa07 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3321,7 +3321,11 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa // a single file; or it may still have the format set to "original" - // even if that's not what they are trying to do now. // So make sure to reset these values: - guestbookResponse.setDataFile(null); + if(fileMetadataForAction == null) { + guestbookResponse.setDataFile(null); + } else { + guestbookResponse.setDataFile(fileMetadataForAction.getDataFile()); + } if(isGlobusTransfer) { guestbookResponse.setSelectedFileIds(getFilesIdsString(getSelectedGlobusTransferableFiles())); } else { @@ -6355,27 +6359,8 @@ public void startGlobusTransfer(boolean transferAll, boolean popupShown) { // transfer is updateGuestbookResponse(guestbookRequired, true, true); if ((!guestbookRequired && !mixed) || popupShown) { - ApiToken apiToken = null; - User user = session.getUser(); - if (user instanceof AuthenticatedUser) { - apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); - } else if (user instanceof PrivateUrlUser) { - PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; - PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); - apiToken = new ApiToken(); - apiToken.setTokenString(privUrl.getToken()); - } - if (fileMetadataForAction != null) { - List downloadFMList = new ArrayList(1); - downloadFMList.add(fileMetadataForAction); - PrimeFaces.current() - .executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, downloadFMList)); - } else { - if (getSelectedGlobusTransferableFiles() != null) { - PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken, - getSelectedGlobusTransferableFiles())); - } - } + boolean doNotSaveGuestbookResponse = workingVersion.isDraft(); + globusService.writeGuestbookAndStartTransfer(guestbookResponse, doNotSaveGuestbookResponse); globusTransferRequested = false; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index a6ae7223d9d..4d8100124ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.externaltools.ExternalTool; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import 
edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; @@ -53,6 +54,9 @@ public class FileDownloadHelper implements java.io.Serializable { @EJB DataFileServiceBean datafileService; + + @EJB + GlobusServiceBean globusService; private final Map fileDownloadPermissionMap = new HashMap<>(); // { FileMetadata.id : Boolean } @@ -60,32 +64,32 @@ public FileDownloadHelper() { this.filesForRequestAccess = new ArrayList<>(); } - // See also @Size(max = 255) in GuestbookResponse - private boolean testResponseLength(String value) { - return !(value != null && value.length() > 255); - } - // This helper method is called from the Download terms/guestbook/etc. popup, // when the user clicks the "ok" button. We use it, instead of calling // downloadServiceBean directly, in order to differentiate between single // file downloads and multiple (batch) downloads - since both use the same // terms/etc. popup. - public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { + public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse, boolean isGlobusTransfer) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this // guestbookResponse permanently in the database. - if (guestbookResponse.getSelectedFileIds() != null) { - // this is a batch (multiple file) download. - // Although here's a chance that this is not really a batch download - i.e., - // there may only be one file on the file list. But the fileDownloadService - // method below will check for that, and will redirect to the single download, if - // that's the case. -- L.A. - fileDownloadService.writeGuestbookAndStartBatchDownload(guestbookResponse); - } else if (guestbookResponse.getDataFile() != null) { - // this a single file download: - fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse); + if(isGlobusTransfer) { + globusService.writeGuestbookAndStartTransfer(guestbookResponse, true); + } else { + if (guestbookResponse.getSelectedFileIds() != null) { + // this is a batch (multiple file) download. + // Although here's a chance that this is not really a batch download - i.e., + // there may only be one file on the file list. But the fileDownloadService + // method below will check for that, and will redirect to the single download, + // if + // that's the case. -- L.A. 
+ fileDownloadService.writeGuestbookAndStartBatchDownload(guestbookResponse); + } else if (guestbookResponse.getDataFile() != null) { + // this a single file download: + fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 976f1e084ac..9041ccf887c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -99,7 +99,7 @@ public class GuestbookResponse implements Serializable { */ public static final String ACCESS_REQUEST = "AccessRequest"; - static final String DOWNLOAD = "Download"; + public static final String DOWNLOAD = "Download"; static final String SUBSET = "Subset"; static final String EXPLORE = "Explore"; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index cc064976982..bcb8799ec9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -213,9 +213,9 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] if (di.getConversionParam().equals("format")) { if ("GlobusTransfer".equals(di.getConversionParamValue())) { - List downloadFMList = new ArrayList(1); - downloadFMList.add(dataFile.getFileMetadata()); - redirect_url_str = globusService.getGlobusAppUrlForDataset(dataFile.getOwner(), false, downloadFMList); + List downloadDFList = new ArrayList(1); + downloadDFList.add(dataFile); + redirect_url_str = globusService.getGlobusAppUrlForDataset(dataFile.getOwner(), false, downloadDFList); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d8742fc90d5..0c991424ce9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -50,15 +50,19 @@ import java.util.stream.IntStream; import org.apache.commons.codec.binary.StringUtils; +import org.primefaces.PrimeFaces; import com.google.gson.Gson; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.privateurl.PrivateUrl; +import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; @@ -73,21 +77,22 @@ public class GlobusServiceBean implements java.io.Serializable { @EJB protected DatasetServiceBean datasetSvc; - @EJB protected SettingsServiceBean settingsSvc; - @Inject DataverseSession session; - @EJB protected AuthenticationServiceBean authSvc; - @EJB EjbDataverseEngine commandEngine; - @EJB UserNotificationServiceBean userNotificationService; + @EJB + PrivateUrlServiceBean privateUrlService; + @EJB 
+ FileDownloadServiceBean fileDownloadService; + @EJB + DataFileServiceBean dataFileService; private static final Logger logger = Logger.getLogger(GlobusServiceBean.class.getCanonicalName()); private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); @@ -600,7 +605,7 @@ public String getGlobusAppUrlForDataset(Dataset d) { return getGlobusAppUrlForDataset(d, true, null); } - public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List fileMetadataList) { + public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List dataFiles) { String localeCode = session.getLocaleCode(); ApiToken apiToken = null; User user = session.getUser(); @@ -629,10 +634,6 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List downloadDFList = new ArrayList(1); + downloadDFList.add(df); + if (!doNotSaveGuestbookResponse) { + fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); + } + PrimeFaces.current() + .executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, downloadDFList)); + } else { + //Following FileDownloadServiceBean writeGuestbookAndStartBatchDownload + List list = new ArrayList<>(Arrays.asList(guestbookResponse.getSelectedFileIds().split(","))); + List selectedFiles = new ArrayList(); + for (String idAsString : list) { + try { + Long fileId = Long.parseLong(idAsString); + // If we need to create a GuestBookResponse record, we have to + // look up the DataFile object for this file: + if (!doNotSaveGuestbookResponse) { + df = dataFileService.findCheapAndEasy(fileId); + guestbookResponse.setDataFile(df); + fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); + selectedFiles.add(df); + } + } catch (NumberFormatException nfe) { + logger.warning("A file id passed to the writeGuestbookAndStartTransfer method as a string could not be converted back to Long: " + idAsString); + return; + } + + } + if (!selectedFiles.isEmpty()) { + //Use dataset from one file - files should all be from the same dataset + PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, + selectedFiles)); + } + } + } } diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 34df0c79390..5948047d845 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -274,8 +274,17 @@ + + + + From a76158f5903ec73a78b284de90d6491a7e05bfce Mon Sep 17 00:00:00 2001 From: qqmyers Date: Sat, 2 Dec 2023 13:35:33 -0500 Subject: [PATCH 0367/1112] suppress download entry when not accessible, refactor --- .../edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../harvard/iq/dataverse/SettingsWrapper.java | 22 +++ .../file-download-button-fragment.xhtml | 6 +- .../dataaccess/GlobusOverlayAccessIOTest.java | 176 ++++++++++++++++++ 4 files changed, 202 insertions(+), 4 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 830e146fa07..704c1d42228 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3354,7 +3354,7 @@ private boolean filterSelectedFiles(){ setSelectedNonGlobusTransferableFiles(new ArrayList<>()); boolean someFiles = false; - boolean globusDownloadEnabled = systemConfig.isGlobusDownload(); + boolean 
globusDownloadEnabled = settingsWrapper.isGlobusDownload(); for (FileMetadata fmd : this.selectedFiles){ boolean downloadable=this.fileDownloadHelper.canDownloadFile(fmd); diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 8b7f732d03f..8ab1e87aef2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -6,6 +6,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.Setting; @@ -337,6 +339,26 @@ public boolean isGlobusEnabledStorageDriver(String driverId) { return (GlobusAccessibleStore.acceptsGlobusTransfers(driverId) || GlobusAccessibleStore.allowsGlobusReferences(driverId)); } + public boolean isDownloadable(FileMetadata fmd) { + boolean downloadable=true; + if(isGlobusFileDownload()) { + String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier()); + + downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId); + } + return downloadable; + } + + public boolean isGlobusTransferable(FileMetadata fmd) { + boolean globusTransferable=true; + if(isGlobusFileDownload()) { + String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier()); + globusTransferable = GlobusAccessibleStore.isGlobusAccessible(driverId); + } + return globusTransferable; + } + + public String getGlobusAppUrl() { if (globusAppUrl == null) { globusAppUrl = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost"); diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 318aab1454e..9c29fd777a1 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -60,7 +60,7 @@ -
  • +
  • gsio = new GlobusOverlayAccessIO(datafile, null, "globus"); + System.out.println("Size2 is " + gsio.retrieveSizeFromMedia()); + + System.out.println( + "NotValid: " + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.out.println( + "ValidRemote: " + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://localid//of/the/hill")); + System.setProperty("dataverse.files.globus.managed", "true"); + datafile.setStorageIdentifier("globus://" + baseStoreId + "//" + logoPath); + System.out.println("ValidLocal: " + + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://176e28068b0-1c3f80357c42")); + + // We can read the storageIdentifier and get the driver + assertTrue(datafile.getStorageIdentifier() + .startsWith(DataAccess.getStorageDriverFromIdentifier(datafile.getStorageIdentifier()))); + // We can get the driver type from it's ID + assertTrue(DataAccess.getDriverType("globus").equals(System.getProperty("dataverse.files.globus.type"))); + // When we get a StorageIO for the file, it is the right type + StorageIO storageIO = DataAccess.getStorageIO(localDatafile); + assertTrue(storageIO instanceof GlobusOverlayAccessIO); + // When we use it, we can get properties like the remote store name + GlobusOverlayAccessIO globusIO = (GlobusOverlayAccessIO) storageIO; + assertTrue( + globusIO.getRemoteStoreName().equals(System.getProperty("dataverse.files.globus.remote-store-name"))); + + String location = globusIO.getStorageLocation(); + assertEquals("globus:///" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + baseStoreId, location); +/* + // TBD: + // And can get a temporary download URL for the main file + String signedURL = globusIO.generateTemporaryDownloadUrl(null, null, null); + System.out.println(signedURL); + // And the URL starts with the right stuff + assertTrue(signedURL.startsWith(System.getProperty("dataverse.files.globus." 
+ GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH) + "/" + logoPath)); + // And the signature is valid + // assertTrue( + // UrlSignerUtil.isValidUrl(signedURL, null, null, + // System.getProperty("dataverse.files.globus.secret-key"))); + // And we get an unsigned URL with the right stuff with no key + System.clearProperty("dataverse.files.globus.secret-key"); + String unsignedURL = globusIO.generateTemporaryDownloadUrl(null, null, null); + assertTrue(unsignedURL.equals(System.getProperty("dataverse.files.globus.base-url") + "/" + logoPath)); +*/ + // Once we've opened, we can get the file size (only works if the call to Globus + // works) + globusIO.open(DataAccessOption.READ_ACCESS); + assertTrue(globusIO.getSize() > 0); + // If we ask for the path for an aux file, it is correct + System.out.println(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, + identifier, baseStoreId + ".auxobject").toString()); + System.out.println(globusIO.getAuxObjectAsPath("auxobject").toString()); + assertTrue(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, identifier, + baseStoreId + ".auxobject").equals(globusIO.getAuxObjectAsPath("auxobject"))); + IOException thrown = assertThrows(IOException.class, () -> DataAccess.getStorageIO(localDatafile), + "Expected getStorageIO() to throw, but it didn't"); + // 'test' is the driverId in the IOException messages + assertTrue(thrown.getMessage().contains("globus")); + + } + + @Test + void testRemoteOverlayIdentifierFormats() throws IOException { + System.clearProperty("dataverse.files.globus.managed"); + datafile.setStorageIdentifier( + "globus://" + baseStoreId + "//d8c42580-6528-4605-9ad8-116a61982644/hdc1/" + logoPath); + assertTrue(DataAccess.isValidDirectStorageIdentifier(datafile.getStorageIdentifier())); + assertFalse( + DataAccess.isValidDirectStorageIdentifier(datafile.getStorageIdentifier().replace("globus", "bad"))); + assertFalse(DataAccess.isValidDirectStorageIdentifier(localDatafile.getStorageIdentifier())); + System.setProperty("dataverse.files.globus.managed", "true"); + assertTrue(DataAccess.isValidDirectStorageIdentifier(localDatafile.getStorageIdentifier())); + + } + +} From 93a586727a3c00069699eb47e5ca5ca3ebbf91cf Mon Sep 17 00:00:00 2001 From: qqmyers Date: Sat, 2 Dec 2023 17:58:45 -0500 Subject: [PATCH 0368/1112] remove old testing code --- .../dataaccess/GlobusOverlayAccessIO.java | 46 ----- .../dataaccess/GlobusOverlayAccessIOTest.java | 176 ------------------ 2 files changed, 222 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 3e72fa85d35..e825af8cf30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -408,52 +408,6 @@ private static String[] getAllowedEndpoints(String driverId) throws IOException } - public static void main(String[] args) { - System.out.println("Running the main method"); - if (args.length > 0) { - System.out.printf("List of arguments: {}", Arrays.toString(args)); - } - System.setProperty("dataverse.files.globus.base-url", "globus://d8c42580-6528-4605-9ad8-116a61982644"); - System.out.println("NotValid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); - System.out.println("ValidRemote: " + isValidIdentifier("globus", "globus://localid//of/the/hill")); - 
System.setProperty("dataverse.files.globus.managed", "true"); - - System.out.println("ValidLocal: " + isValidIdentifier("globus", "globus://176e28068b0-1c3f80357c42")); - System.setProperty("dataverse.files.globus.globus-token", - ""); - System.setProperty("dataverse.files.globus.base-store", "file"); - System.setProperty("dataverse.files.file.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - System.setProperty("dataverse.files.file.directory", "/tmp/files"); - // logger.info(JvmSettings.BASE_URL.lookup("globus")); - // logger.info(JvmSettings.GLOBUS_TOKEN.lookup("globus")); - - try { - GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO( - "globus://1234///hdc1/image001.mrc", "globus"); - logger.info("Size is " + gsio.retrieveSizeFromMedia()); - - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - try { - DataFile df = new DataFile(); - Dataset ds = new Dataset(); - ds.setAuthority("10.5072"); - ds.setIdentifier("FK21234"); - df.setOwner(ds); - df.setStorageIdentifier("globus://1234///hdc1/image001.mrc"); - GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO(df, null, "globus"); - logger.info("Size2 is " + gsio.retrieveSizeFromMedia()); - - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Override public void open(DataAccessOption... option) throws IOException { // TODO Auto-generated method stub diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index bf3bcdbfe8e..e69de29bb2d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -1,176 +0,0 @@ -/* - * Copyright 2018 Forschungszentrum Jülich GmbH - * SPDX-License-Identifier: Apache 2.0 - */ -package edu.harvard.iq.dataverse.dataaccess; - -import edu.harvard.iq.dataverse.DOIServiceBean; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.GlobalIdServiceBean; -import edu.harvard.iq.dataverse.mocks.MocksFactory; -import edu.harvard.iq.dataverse.settings.JvmSettings; -import edu.harvard.iq.dataverse.util.UrlSignerUtil; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import static org.junit.jupiter.api.Assertions.*; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.mockito.junit.jupiter.MockitoSettings; -import org.mockito.quality.Strictness; -import java.io.IOException; -import java.nio.file.Paths; - -@ExtendWith(MockitoExtension.class) -@MockitoSettings(strictness = Strictness.STRICT_STUBS) -public class GlobusOverlayAccessIOTest { - - @Mock - - private Dataset dataset; - private DataFile datafile; - private DataFile localDatafile; - private String baseStoreId = "182ad2bda2f-c3508e719076"; - private String logoPath = "image002.mrc"; - private String authority = "10.5072"; - private String identifier = "F2ABCDEF"; - - @BeforeEach - public void setUp() { - System.setProperty("dataverse.files.globus." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH, - "d8c42580-6528-4605-9ad8-116a61982644/hdc1"); - System.setProperty("dataverse.files.globus." 
+ AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS, - "d8c42580-6528-4605-9ad8-116a61982644/hdc1"); - - System.setProperty("dataverse.files.globus.globus-token", - "YTVlNzFjNzItYWVkYi00Mzg4LTkzNWQtY2NhM2IyODI2MzdmOnErQXRBeWNEMVM3amFWVnB0RlFnRk5zMTc3OFdDa3lGeVZPT3k0RDFpaXM9"); - System.setProperty("dataverse.files.globus.remote-store-name", "GlobusEndpoint1"); - System.setProperty("dataverse.files.globus.type", "globus"); - - System.setProperty("dataverse.files.globus.managed", "true"); - - System.setProperty("dataverse.files.globus.base-store", "file"); - System.setProperty("dataverse.files.file.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - System.setProperty("dataverse.files.file.directory", "/tmp/files"); - - // System.setProperty("dataverse.files.test.type", "remote"); - System.setProperty("dataverse.files.globus.label", "globusTest"); - System.setProperty("dataverse.files.test.base-url", "https://demo.dataverse.org/resources"); - System.setProperty("dataverse.files.test.base-store", "file"); - System.setProperty("dataverse.files.test.download-redirect", "true"); - System.setProperty("dataverse.files.test.remote-store-name", "DemoDataCorp"); - System.setProperty("dataverse.files.globus.secret-key", "12345"); // Real keys should be much longer, more - // random - System.setProperty("dataverse.files.file.type", "file"); - System.setProperty("dataverse.files.file.label", "default"); - datafile = MocksFactory.makeDataFile(); - dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", - DOIServiceBean.DOI_RESOLVER_URL, null)); - datafile.setOwner(dataset); - datafile.setStorageIdentifier("globus://" + baseStoreId + "//" + logoPath); - - localDatafile = MocksFactory.makeDataFile(); - localDatafile.setOwner(dataset); - localDatafile.setStorageIdentifier("globus://" + baseStoreId); - } - - @AfterEach - public void tearDown() { - System.clearProperty("dataverse.files.test.type"); - System.clearProperty("dataverse.files.test.label"); - System.clearProperty("dataverse.files.test.base-url"); - System.clearProperty("dataverse.files.test.base-store"); - System.clearProperty("dataverse.files.test.download-redirect"); - System.clearProperty("dataverse.files.test.label"); - System.clearProperty("dataverse.files.test.remote-store-name"); - System.clearProperty("dataverse.files.test.secret-key"); - System.clearProperty("dataverse.files.file.type"); - System.clearProperty("dataverse.files.file.label"); - } - - @Test - void testGlobusOverlayFiles() throws IOException { - System.clearProperty("dataverse.files.globus.managed"); - datafile.setStorageIdentifier( - "globus://" + baseStoreId + "//d8c42580-6528-4605-9ad8-116a61982644/hdc1/" + logoPath); - GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO(datafile, null, "globus"); - System.out.println("Size2 is " + gsio.retrieveSizeFromMedia()); - - System.out.println( - "NotValid: " + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://localid//../of/the/hill")); - System.out.println( - "ValidRemote: " + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://localid//of/the/hill")); - System.setProperty("dataverse.files.globus.managed", "true"); - datafile.setStorageIdentifier("globus://" + baseStoreId + "//" + logoPath); - System.out.println("ValidLocal: " - + GlobusOverlayAccessIO.isValidIdentifier("globus", "globus://176e28068b0-1c3f80357c42")); - - // We can read the storageIdentifier and get the driver - 
assertTrue(datafile.getStorageIdentifier() - .startsWith(DataAccess.getStorageDriverFromIdentifier(datafile.getStorageIdentifier()))); - // We can get the driver type from it's ID - assertTrue(DataAccess.getDriverType("globus").equals(System.getProperty("dataverse.files.globus.type"))); - // When we get a StorageIO for the file, it is the right type - StorageIO storageIO = DataAccess.getStorageIO(localDatafile); - assertTrue(storageIO instanceof GlobusOverlayAccessIO); - // When we use it, we can get properties like the remote store name - GlobusOverlayAccessIO globusIO = (GlobusOverlayAccessIO) storageIO; - assertTrue( - globusIO.getRemoteStoreName().equals(System.getProperty("dataverse.files.globus.remote-store-name"))); - - String location = globusIO.getStorageLocation(); - assertEquals("globus:///" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + baseStoreId, location); -/* - // TBD: - // And can get a temporary download URL for the main file - String signedURL = globusIO.generateTemporaryDownloadUrl(null, null, null); - System.out.println(signedURL); - // And the URL starts with the right stuff - assertTrue(signedURL.startsWith(System.getProperty("dataverse.files.globus." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH) + "/" + logoPath)); - // And the signature is valid - // assertTrue( - // UrlSignerUtil.isValidUrl(signedURL, null, null, - // System.getProperty("dataverse.files.globus.secret-key"))); - // And we get an unsigned URL with the right stuff with no key - System.clearProperty("dataverse.files.globus.secret-key"); - String unsignedURL = globusIO.generateTemporaryDownloadUrl(null, null, null); - assertTrue(unsignedURL.equals(System.getProperty("dataverse.files.globus.base-url") + "/" + logoPath)); -*/ - // Once we've opened, we can get the file size (only works if the call to Globus - // works) - globusIO.open(DataAccessOption.READ_ACCESS); - assertTrue(globusIO.getSize() > 0); - // If we ask for the path for an aux file, it is correct - System.out.println(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, - identifier, baseStoreId + ".auxobject").toString()); - System.out.println(globusIO.getAuxObjectAsPath("auxobject").toString()); - assertTrue(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, identifier, - baseStoreId + ".auxobject").equals(globusIO.getAuxObjectAsPath("auxobject"))); - IOException thrown = assertThrows(IOException.class, () -> DataAccess.getStorageIO(localDatafile), - "Expected getStorageIO() to throw, but it didn't"); - // 'test' is the driverId in the IOException messages - assertTrue(thrown.getMessage().contains("globus")); - - } - - @Test - void testRemoteOverlayIdentifierFormats() throws IOException { - System.clearProperty("dataverse.files.globus.managed"); - datafile.setStorageIdentifier( - "globus://" + baseStoreId + "//d8c42580-6528-4605-9ad8-116a61982644/hdc1/" + logoPath); - assertTrue(DataAccess.isValidDirectStorageIdentifier(datafile.getStorageIdentifier())); - assertFalse( - DataAccess.isValidDirectStorageIdentifier(datafile.getStorageIdentifier().replace("globus", "bad"))); - assertFalse(DataAccess.isValidDirectStorageIdentifier(localDatafile.getStorageIdentifier())); - System.setProperty("dataverse.files.globus.managed", "true"); - assertTrue(DataAccess.isValidDirectStorageIdentifier(localDatafile.getStorageIdentifier())); - - } - -} From 1a96c566bccdf32aefeaca89898a3746b146fa08 Mon Sep 17 00:00:00 2001 From: 
Leonid Andreev Date: Sun, 3 Dec 2023 18:57:59 -0500 Subject: [PATCH 0369/1112] the kill switch for the real-time storageuse updates (just in case) and some related documentation (#8549) --- .../source/admin/collectionquotas.rst | 2 + .../source/installation/config.rst | 5 ++ .../dataverse/ingest/IngestServiceBean.java | 2 +- .../iq/dataverse/settings/JvmSettings.java | 3 + .../storageuse/StorageUseServiceBean.java | 58 ++++++------------- 5 files changed, 30 insertions(+), 40 deletions(-) diff --git a/doc/sphinx-guides/source/admin/collectionquotas.rst b/doc/sphinx-guides/source/admin/collectionquotas.rst index 883b6cf0c93..2ce3132e2ba 100644 --- a/doc/sphinx-guides/source/admin/collectionquotas.rst +++ b/doc/sphinx-guides/source/admin/collectionquotas.rst @@ -1,3 +1,4 @@ + Storage Quotas for Collections ============================== @@ -15,3 +16,4 @@ Please note that only the sizes of the main datafiles and the archival tab-delim When quotas are set and enforced, the users will be informed of the remaining storage allocation on the file upload page together with other upload and processing limits. +Part of the new and experimental nature of this feature is that we don't know for the fact yet how well it will function in real life on a very busy production system, despite our best efforts to test it prior to the release. One specific issue is having to update the recorded storage use for every parent collection of the given dataset whenever new files are added. This includes updating the combined size of the root, top collection - which will need to be updated after *every* file upload. In an unlikely case that this will start causing problems with race conditions and database update conflicts, it is possible to disable these updates (and thus disable the storage quotas feature), by setting the :ref:`dataverse.storageuse.disable-storageuse-increments` JVM setting to true. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 52ba35376ac..03eeff9dbb6 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2489,6 +2489,11 @@ This setting was added to keep S3 direct upload lightweight. When that feature i See also :ref:`s3-direct-upload-features-disabled`. +dataverse.storageuse.disable-storageuse-increments +++++++++++++++++++++++++++++++++++++++++++++++++++ + +This setting serves the role of an emergency "kill switch" that will disable maintaining the real time record of storage use for all the datasets and collections in the database. Because of the experimental nature of this feature (see :doc:`/admin/collectionquotas`) that hasn't been used in production setting as of this release, v6.1 this setting is provided in case these updates start causing database race conditions and conflicts on a busy server. + dataverse.auth.oidc.* +++++++++++++++++++++ diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 5efb4c06f48..233f746fb17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -206,7 +206,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, boolean unattached = false; boolean savedSuccess = false; if (dataFile.getOwner() == null) { - // is it ever "unattached"? + // is it ever "attached"? 
// do we ever call this method with dataFile.getOwner() != null? // - we really shouldn't be, either. unattached = true; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index cc3272413c7..7c65bba77d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -150,6 +150,9 @@ public enum JvmSettings { SCOPE_NETCDF(PREFIX, "netcdf"), GEO_EXTRACT_S3_DIRECT_UPLOAD(SCOPE_NETCDF, "geo-extract-s3-direct-upload"), + // STORAGE USE SETTINGS + SCOPE_STORAGEUSE(PREFIX, "storageuse"), + STORAGEUSE_DISABLE_UPDATES(SCOPE_STORAGEUSE, "disable-storageuse-increments"), ; private static final String SCOPE_SEPARATOR = "."; diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java index b542a7cd661..18e4ef49640 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java @@ -1,12 +1,14 @@ package edu.harvard.iq.dataverse.storageuse; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.settings.JvmSettings; import jakarta.ejb.Stateless; import jakarta.ejb.TransactionAttribute; import jakarta.ejb.TransactionAttributeType; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; +import java.util.Optional; import java.util.logging.Logger; /** @@ -37,31 +39,6 @@ public Long findStorageSizeByDvContainerId(Long dvObjectId) { return res == null ? 0L : res; } - public void incrementStorageSizeHierarchy(DvObjectContainer dvObject, Long filesize) { - incrementStorageSize(dvObject, filesize); - DvObjectContainer parent = dvObject.getOwner(); - while (parent != null) { - incrementStorageSize(parent, filesize); - parent = parent.getOwner(); - } - } - - /** - * @param dvObject - * @param filesize - */ - public void incrementStorageSize(DvObjectContainer dvObject, Long filesize) { - StorageUse dvContainerSU = findByDvContainerId(dvObject.getId()); - if (dvContainerSU != null) { - // @todo: named query - dvContainerSU.incrementSizeInBytes(filesize); - em.merge(dvContainerSU); - } else { - dvContainerSU = new StorageUse(dvObject, filesize); - em.persist(dvContainerSU); - } - } - /** * Increments the recorded storage size for all the dvobject parents of a * datafile, recursively. 
@@ -71,20 +48,23 @@ public void incrementStorageSize(DvObjectContainer dvObject, Long filesize) { @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void incrementStorageSizeRecursively(Long dvObjectContainerId, Long increment) { //@todo should throw exceptions if either parameter is null - String queryString = "WITH RECURSIVE uptree (id, owner_id) AS\n" - + "(" - + " SELECT id, owner_id\n" - + " FROM dvobject\n" - + " WHERE id=" + dvObjectContainerId + "\n" - + " UNION ALL\n" - + " SELECT dvobject.id, dvobject.owner_id\n" - + " FROM dvobject\n" - + " JOIN uptree ON dvobject.id = uptree.owner_id)\n" - + "UPDATE storageuse SET sizeinbytes=COALESCE(sizeinbytes,0)+" + increment + "\n" - + "FROM uptree\n" - + "WHERE dvobjectcontainer_id = uptree.id;"; - - int parentsUpdated = em.createNativeQuery(queryString).executeUpdate(); + Optional allow = JvmSettings.STORAGEUSE_DISABLE_UPDATES.lookupOptional(Boolean.class); + if (!(allow.isPresent() && allow.get())) { + String queryString = "WITH RECURSIVE uptree (id, owner_id) AS\n" + + "(" + + " SELECT id, owner_id\n" + + " FROM dvobject\n" + + " WHERE id=" + dvObjectContainerId + "\n" + + " UNION ALL\n" + + " SELECT dvobject.id, dvobject.owner_id\n" + + " FROM dvobject\n" + + " JOIN uptree ON dvobject.id = uptree.owner_id)\n" + + "UPDATE storageuse SET sizeinbytes=COALESCE(sizeinbytes,0)+" + increment + "\n" + + "FROM uptree\n" + + "WHERE dvobjectcontainer_id = uptree.id;"; + + int parentsUpdated = em.createNativeQuery(queryString).executeUpdate(); + } // @todo throw an exception if the number of parent dvobjects updated by // the query is < 2 - ? } From 0a536da0c42ed9654641985f1fd8dc20b461c16c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 4 Dec 2023 09:46:59 -0500 Subject: [PATCH 0370/1112] a missing ref in the doc. #8549 --- doc/sphinx-guides/source/installation/config.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 03eeff9dbb6..7cb321708a7 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2489,6 +2489,8 @@ This setting was added to keep S3 direct upload lightweight. When that feature i See also :ref:`s3-direct-upload-features-disabled`. +.. _dataverse.storageuse.disable-storageuse-increments: + dataverse.storageuse.disable-storageuse-increments ++++++++++++++++++++++++++++++++++++++++++++++++++ From b20f198368615d7d8c4e798a25d6f68a6d0c4ed9 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 4 Dec 2023 11:27:27 -0500 Subject: [PATCH 0371/1112] Bump version to 6.1 --- doc/sphinx-guides/source/conf.py | 4 ++-- doc/sphinx-guides/source/versions.rst | 3 ++- modules/dataverse-parent/pom.xml | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 0660ec3b071..64efc359e9a 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -66,9 +66,9 @@ # built documents. # # The short X.Y version. -version = '6.0' +version = '6.1' # The full version, including alpha/beta/rc tags. -release = '6.0' +release = '6.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index 2000a2097f0..2cf7f46dc5e 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) ` -- 6.0 +- 6.1 +- `6.0 `__ - `5.14 `__ - `5.13 `__ - `5.12.1 `__ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index db0fa46a952..7b305cad581 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 6.0 + 6.1 17 UTF-8 From 5f29144762c166c7856958497e24f629d53c92a0 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 4 Dec 2023 12:58:01 -0500 Subject: [PATCH 0372/1112] adding 6.1 release notes and removing .md files --- ...001-datasets-files-api-user-permissions.md | 13 -- doc/release-notes/10060-api-changelog.md | 3 - .../10093-signedUrl_improvements.md | 5 - .../10104-dataset-citation-deaccessioned.md | 1 - doc/release-notes/6.1-release-notes.md | 195 ++++++++++++++++++ .../9268-8349-oidc-improvements.md | 43 ---- doc/release-notes/9412-markdown-previewer.md | 1 - doc/release-notes/9428-alternative-title.md | 9 - doc/release-notes/9589-ds-configure-tool.md | 1 - doc/release-notes/9590-intellij-redeploy.md | 3 - .../9599-guestbook-at-request.md | 2 - doc/release-notes/9635-solr-improvements.md | 4 - doc/release-notes/9692-files-api-extension.md | 7 - .../9714-files-api-extension-filters.md | 14 -- .../9763-versions-api-improvements.md | 8 - .../9785-files-api-extension-search-text.md | 3 - .../9834-files-api-extension-counts.md | 6 - ...oad-extension-new-file-access-endpoints.md | 14 -- .../9852-files-api-extension-deaccession.md | 12 -- .../9880-info-api-zip-limit-embargo.md | 5 - .../9907-files-api-counts-with-criteria.md | 11 - doc/release-notes/9955-Signposting-updates.md | 7 - ...et-api-downloadsize-ignore-tabular-size.md | 9 - .../9972-files-api-filter-by-tabular-tags.md | 3 - ...with-criteria-and-deaccessioned-support.md | 12 -- 25 files changed, 195 insertions(+), 196 deletions(-) delete mode 100644 doc/release-notes/10001-datasets-files-api-user-permissions.md delete mode 100644 doc/release-notes/10060-api-changelog.md delete mode 100644 doc/release-notes/10093-signedUrl_improvements.md delete mode 100644 doc/release-notes/10104-dataset-citation-deaccessioned.md create mode 100644 doc/release-notes/6.1-release-notes.md delete mode 100644 doc/release-notes/9268-8349-oidc-improvements.md delete mode 100644 doc/release-notes/9412-markdown-previewer.md delete mode 100644 doc/release-notes/9428-alternative-title.md delete mode 100644 doc/release-notes/9589-ds-configure-tool.md delete mode 100644 doc/release-notes/9590-intellij-redeploy.md delete mode 100644 doc/release-notes/9599-guestbook-at-request.md delete mode 100644 doc/release-notes/9635-solr-improvements.md delete mode 100644 doc/release-notes/9692-files-api-extension.md delete mode 100644 doc/release-notes/9714-files-api-extension-filters.md delete mode 100644 doc/release-notes/9763-versions-api-improvements.md delete mode 100644 doc/release-notes/9785-files-api-extension-search-text.md delete mode 
100644 doc/release-notes/9834-files-api-extension-counts.md delete mode 100644 doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md delete mode 100644 doc/release-notes/9852-files-api-extension-deaccession.md delete mode 100644 doc/release-notes/9880-info-api-zip-limit-embargo.md delete mode 100644 doc/release-notes/9907-files-api-counts-with-criteria.md delete mode 100644 doc/release-notes/9955-Signposting-updates.md delete mode 100644 doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md delete mode 100644 doc/release-notes/9972-files-api-filter-by-tabular-tags.md delete mode 100644 doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md diff --git a/doc/release-notes/10001-datasets-files-api-user-permissions.md b/doc/release-notes/10001-datasets-files-api-user-permissions.md deleted file mode 100644 index 0aa75f9218a..00000000000 --- a/doc/release-notes/10001-datasets-files-api-user-permissions.md +++ /dev/null @@ -1,13 +0,0 @@ -- New query parameter `includeDeaccessioned` added to the getVersion endpoint (/api/datasets/{id}/versions/{versionId}) to consider deaccessioned versions when searching for versions. - - -- New endpoint to get user permissions on a dataset (/api/datasets/{id}/userPermissions). In particular, the user permissions that this API call checks, returned as booleans, are the following: - - - Can view the unpublished dataset - - Can edit the dataset - - Can publish the dataset - - Can manage the dataset permissions - - Can delete the dataset draft - - -- New permission check "canManageFilePermissions" added to the existing endpoint for getting user permissions on a file (/api/access/datafile/{id}/userPermissions). \ No newline at end of file diff --git a/doc/release-notes/10060-api-changelog.md b/doc/release-notes/10060-api-changelog.md deleted file mode 100644 index 56ac96e3564..00000000000 --- a/doc/release-notes/10060-api-changelog.md +++ /dev/null @@ -1,3 +0,0 @@ -We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html - -See also #10060. diff --git a/doc/release-notes/10093-signedUrl_improvements.md b/doc/release-notes/10093-signedUrl_improvements.md deleted file mode 100644 index 26a17c65e3f..00000000000 --- a/doc/release-notes/10093-signedUrl_improvements.md +++ /dev/null @@ -1,5 +0,0 @@ -A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. - -SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. - -Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. 
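As a hedged illustration of the user-permissions endpoints described in the notes above, calls might look like the following; the server URL, API token, and object ids are placeholders, not values taken from this patch.

```bash
# Dataset-level permissions for the calling user (returns the booleans listed above).
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org

curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/24/userPermissions"

# File-level permissions, including the canManageFilePermissions check noted above.
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/access/datafile/42/userPermissions"
```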
diff --git a/doc/release-notes/10104-dataset-citation-deaccessioned.md b/doc/release-notes/10104-dataset-citation-deaccessioned.md
deleted file mode 100644
index 0ba06d729c4..00000000000
--- a/doc/release-notes/10104-dataset-citation-deaccessioned.md
+++ /dev/null
@@ -1 +0,0 @@
-The getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation.
diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md
new file mode 100644
index 00000000000..c2b52ab34b8
--- /dev/null
+++ b/doc/release-notes/6.1-release-notes.md
@@ -0,0 +1,195 @@
+# Dataverse 6.1
+
+(If this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/6.1-release-notes.md)
+
+This release brings new features, enhancements, and bug fixes to the Dataverse software.
+Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights (Major Upgrades, Breaking Changes)
+
+This release contains major upgrades to core components. Detailed upgrade instructions can be found below.
+
+## Detailed Release Highlights, New Features and Use Case Scenarios
+
+### Dataverse installation can now be configured to allow out-of-band upload
+- Installation can now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`.
+By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server).
+With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store.
+
+### Alternative Title is made repeatable.
+- One will need to update the database with the updated citation block.
+ `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv`
+- One will also need to update the Solr schema:
+ Change the "alternativeTitle" field to multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml`
+ Reload the Solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"`
+
+Since Alternative Title is repeatable now, old JSON APIs would not be compatible with the new version, since the value of Alternative Title has changed from a simple string to an array.
+For example, instead "value": "Alternative Title", the value canbe "value": ["Alternative Title1", "Alternative Title2"] + +### Improvements in the /versions API +- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions +- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output +- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. + +This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. +### The following API endpoints have been added: + +- /api/files/{id}/downloadCount +- /api/files/{id}/dataTables +- /api/files/{id}/metadata/tabularTags New endpoint to set tabular file tags. +- canManageFilePermissions (/access/datafile/{id}/userPermissions) Added for getting user permissions on a file. +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name). +- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. +- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. +- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. +- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version). +- getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes. +- getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. + +### Extended the existing endpoints: +- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain file counts. 
Added support for filtering by optional criteria query parameter: + - contentType + - accessStatus + - categoryName + - tabularTagName + - searchText +- getDownloadSize ("api/datasets/{identifier}/versions/{versionId}/downloadsize"): Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. Added a new optional query parameter "mode" +This parameter applies a filter criteria to the operation and supports the following values: + - All (Default): Includes both archival and original sizes for tabular files + - Archival: Includes only the archival size for tabular files + - Original: Includes only the original size for tabular files. +- /api/datasets/{id}/versions/{versionId} New query parameter `includeDeaccessioned` added to consider deaccessioned versions when searching for versions. +- /api/datasets/{id}/userPermissions Get user permissions on a dataset, in particular, the user permissions that this API call checks, returned as booleans, are the following: + - Can view the unpublished dataset + - Can edit the dataset + - Can publish the dataset + - Can manage the dataset permissions + - Can delete the dataset draft +- getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. + + +### DataFile API payload has been extended to include the following fields: +- tabularData: Boolean field to know if the DataFile is of tabular type +- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) +- friendlyType: String + +### The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination, ordering, and optional filtering +- Access status: through the `accessStatus` query parameter, which supports the following values: + - Public + - Restricted + - EmbargoedThenRestricted + - EmbargoedThenPublic +- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. +- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". + + +### Misc +- Configure tools are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. + +- Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). +The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. + +- Dataverse's OAI_ORE Metadata Export format and archival BagIT exports +(which include the OAI-ORE metadata export file) have been updated to include +information about the dataset version state, e.g. RELEASED or DEACCESSIONED +and to indicate which version of Dataverse was used to create the archival Bag. 
+As part of the latter, the current OAI_ORE Metadata format has been given a 1.0.0 +version designation and it is expected that any future changes to the OAI_ORE export +format will result in a version change and that tools such as DVUploader that can +recreate datasets from archival Bags will start indicating which version(s) of the +OAI_ORE format they can read. +Dataverse installations that have been using archival Bags may wish to update any +existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse +[archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) +to generate updated versions. + +- This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. + - To fix #9952, we surround the license info with `<` and `>`. + - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information + - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. + +- We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html +See also #10060. + +### Solr Improvements +- As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. + +Please see the "Installing Solr" section of the Installation Prerequisites guide. + + +### Development +- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. +For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools + +- There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews + +- A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. + - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. + - Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. + +## OpenID Connect Authentication Provider Improvements + +### Using MicroProfile Config For Provisioning + +With this release it is possible to provision a single OIDC-based authentication provider +by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. 
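A minimal sketch of what such MicroProfile-based provisioning might look like, using the `dataverse.auth.oidc.*` options listed under "New Configuration Options" below; the client id, secret, and issuer URL are placeholders, and the environment-variable names assume the standard MicroProfile mapping of dots to underscores.

```bash
# Sketch only: enable the built-in OIDC provider via environment variables.
export DATAVERSE_AUTH_OIDC_ENABLED=true
export DATAVERSE_AUTH_OIDC_CLIENT_ID=my-client
export DATAVERSE_AUTH_OIDC_CLIENT_SECRET=change-me
export DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=https://keycloak.example.org/realms/test
export DATAVERSE_AUTH_OIDC_TITLE="My Institutional Login"
```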
+ +If you are using an external OIDC provider component as an identity management system and/or broker +to other authentication providers such as Google, eduGain SAML and so on, this might make your +life easier during instance setups and reconfiguration. You no longer need to generate the +necessary JSON file. + +### Adding PKCE Support + +Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable +support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) + +## Improved Testing + +With this release, we add a new type of testing to Dataverse: integration tests which are no end-to-end tests +like our API tests. Starting with OIDC authentication support, we test regularly on CI for working condition +of both OIDC login options in UI and API. + +The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. + +The support for setting JVM options during testing has been improved for developers. You now may add the +`@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is +also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. + +As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. + +## New Configuration Options + +- dataverse.auth.oidc.enabled +- dataverse.auth.oidc.client-id +- dataverse.auth.oidc.client-secret +- dataverse.auth.oidc.auth-server-url +- dataverse.auth.oidc.pkce.enabled +- dataverse.auth.oidc.pkce.method +- dataverse.auth.oidc.title +- dataverse.auth.oidc.subtitle +- dataverse.auth.oidc.pkce.max-cache-size +- dataverse.auth.oidc.pkce.max-cache-age + +## Installation + +If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! + +Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! + +You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC). + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. 
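As a non-authoritative sketch of the extended getVersionFiles filtering described in the notes above, a request combining several of the documented query parameters might look like this; the server URL, API token, dataset id, and version number are placeholders.

```bash
# Sketch only: list public PNG files in a specific dataset version, considering
# deaccessioned versions in the lookup, using parameters named in the notes
# (accessStatus, contentType, searchText, includeDeaccessioned).
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org

curl -H "X-Dataverse-key:$API_TOKEN" \
  "$SERVER_URL/api/datasets/24/versions/1.0/files?accessStatus=Public&contentType=image/png&searchText=survey&includeDeaccessioned=true"
```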
diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md deleted file mode 100644 index ddfc13e603c..00000000000 --- a/doc/release-notes/9268-8349-oidc-improvements.md +++ /dev/null @@ -1,43 +0,0 @@ -## OpenID Connect Authentication Provider Improvements - -### Using MicroProfile Config For Provisioning - -With this release it is possible to provision a single OIDC-based authentication provider -by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. - -If you are using an external OIDC provider component as an identity management system and/or broker -to other authentication providers such as Google, eduGain SAML and so on, this might make your -life easier during instance setups and reconfiguration. You no longer need to generate the -necessary JSON file. - -### Adding PKCE Support - -Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable -support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) - -## Improved Testing - -With this release, we add a new type of testing to Dataverse: integration tests which are no end-to-end tests -like our API tests. Starting with OIDC authentication support, we test regularly on CI for working condition -of both OIDC login options in UI and API. - -The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. - -The support for setting JVM options during testing has been improved for developers. You now may add the -`@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is -also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. - -As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. - -## New Configuration Options - -- dataverse.auth.oidc.enabled -- dataverse.auth.oidc.client-id -- dataverse.auth.oidc.client-secret -- dataverse.auth.oidc.auth-server-url -- dataverse.auth.oidc.pkce.enabled -- dataverse.auth.oidc.pkce.method -- dataverse.auth.oidc.title -- dataverse.auth.oidc.subtitle -- dataverse.auth.oidc.pkce.max-cache-size -- dataverse.auth.oidc.pkce.max-cache-age diff --git a/doc/release-notes/9412-markdown-previewer.md b/doc/release-notes/9412-markdown-previewer.md deleted file mode 100644 index 8faa2679fb0..00000000000 --- a/doc/release-notes/9412-markdown-previewer.md +++ /dev/null @@ -1 +0,0 @@ -There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews diff --git a/doc/release-notes/9428-alternative-title.md b/doc/release-notes/9428-alternative-title.md deleted file mode 100644 index 3bc74f218b5..00000000000 --- a/doc/release-notes/9428-alternative-title.md +++ /dev/null @@ -1,9 +0,0 @@ -Alternative Title is made repeatable. -- One will need to update database with updated citation block. 
-`curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` -- One will also need to update solr schema: -Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml` -Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` - -Since Alternative Title is repeatable now, old json apis would not be compatable with a new version since value of alternative title has changed from simple string to an array. -For example, instead "value": "Alternative Title", the value canbe "value": ["Alternative Title1", "Alternative Title2"] diff --git a/doc/release-notes/9589-ds-configure-tool.md b/doc/release-notes/9589-ds-configure-tool.md deleted file mode 100644 index 70ac5fcaa6a..00000000000 --- a/doc/release-notes/9589-ds-configure-tool.md +++ /dev/null @@ -1 +0,0 @@ -Configure tools are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. diff --git a/doc/release-notes/9590-intellij-redeploy.md b/doc/release-notes/9590-intellij-redeploy.md deleted file mode 100644 index 07af352ece4..00000000000 --- a/doc/release-notes/9590-intellij-redeploy.md +++ /dev/null @@ -1,3 +0,0 @@ -Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. - -For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools diff --git a/doc/release-notes/9599-guestbook-at-request.md b/doc/release-notes/9599-guestbook-at-request.md deleted file mode 100644 index e9554b71fb4..00000000000 --- a/doc/release-notes/9599-guestbook-at-request.md +++ /dev/null @@ -1,2 +0,0 @@ -Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). -The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. diff --git a/doc/release-notes/9635-solr-improvements.md b/doc/release-notes/9635-solr-improvements.md deleted file mode 100644 index ad55ee3afe6..00000000000 --- a/doc/release-notes/9635-solr-improvements.md +++ /dev/null @@ -1,4 +0,0 @@ -- As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. - -Please see the "Installing Solr" section of the Installation Prerequisites guide. 
- diff --git a/doc/release-notes/9692-files-api-extension.md b/doc/release-notes/9692-files-api-extension.md deleted file mode 100644 index baa8e2f87cd..00000000000 --- a/doc/release-notes/9692-files-api-extension.md +++ /dev/null @@ -1,7 +0,0 @@ -The following API endpoints have been added: - -- /api/files/{id}/downloadCount -- /api/files/{id}/dataTables -- /access/datafile/{id}/userPermissions - -The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination and ordering diff --git a/doc/release-notes/9714-files-api-extension-filters.md b/doc/release-notes/9714-files-api-extension-filters.md deleted file mode 100644 index 034230efe61..00000000000 --- a/doc/release-notes/9714-files-api-extension-filters.md +++ /dev/null @@ -1,14 +0,0 @@ -The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by: - -- Access status: through the `accessStatus` query parameter, which supports the following values: - - - Public - - Restricted - - EmbargoedThenRestricted - - EmbargoedThenPublic - - -- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. - - -- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". diff --git a/doc/release-notes/9763-versions-api-improvements.md b/doc/release-notes/9763-versions-api-improvements.md deleted file mode 100644 index 8d7f6c7a20a..00000000000 --- a/doc/release-notes/9763-versions-api-improvements.md +++ /dev/null @@ -1,8 +0,0 @@ -# Improvements in the /versions API - -- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions; -- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output; -- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. - -This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. - diff --git a/doc/release-notes/9785-files-api-extension-search-text.md b/doc/release-notes/9785-files-api-extension-search-text.md deleted file mode 100644 index fb185e1c7af..00000000000 --- a/doc/release-notes/9785-files-api-extension-search-text.md +++ /dev/null @@ -1,3 +0,0 @@ -The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by search text through the `searchText` query parameter. - -The search will be applied to the labels and descriptions of the dataset files. diff --git a/doc/release-notes/9834-files-api-extension-counts.md b/doc/release-notes/9834-files-api-extension-counts.md deleted file mode 100644 index 3ec15d8bd36..00000000000 --- a/doc/release-notes/9834-files-api-extension-counts.md +++ /dev/null @@ -1,6 +0,0 @@ -Implemented the following new endpoints: - -- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name). 
- - -- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. diff --git a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md deleted file mode 100644 index f306ae2ab80..00000000000 --- a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md +++ /dev/null @@ -1,14 +0,0 @@ -Implemented the following new endpoints: - -- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. - - -- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. - - -In addition, the DataFile API payload has been extended to include the following fields: - -- tabularData: Boolean field to know if the DataFile is of tabular type - - -- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md deleted file mode 100644 index 55698580e3c..00000000000 --- a/doc/release-notes/9852-files-api-extension-deaccession.md +++ /dev/null @@ -1,12 +0,0 @@ -Extended the existing endpoints: - -- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) -- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts) - -The above endpoints now accept a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files or file counts. - -Additionally, a new endpoint has been developed to support version deaccessioning through API (Given a dataset and a version). - -- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) - -Finally, the DataFile API payload has been extended to add the field "friendlyType" diff --git a/doc/release-notes/9880-info-api-zip-limit-embargo.md b/doc/release-notes/9880-info-api-zip-limit-embargo.md deleted file mode 100644 index d2afb139e72..00000000000 --- a/doc/release-notes/9880-info-api-zip-limit-embargo.md +++ /dev/null @@ -1,5 +0,0 @@ -Implemented the following new endpoints: - -- getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes. - -- getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. diff --git a/doc/release-notes/9907-files-api-counts-with-criteria.md b/doc/release-notes/9907-files-api-counts-with-criteria.md deleted file mode 100644 index 07cd23daad0..00000000000 --- a/doc/release-notes/9907-files-api-counts-with-criteria.md +++ /dev/null @@ -1,11 +0,0 @@ -Extended the getVersionFileCounts endpoint (/api/datasets/{id}/versions/{versionId}/files/counts) to support filtering by criteria. 
- -In particular, the endpoint now accepts the following optional criteria query parameters: - -- contentType -- accessStatus -- categoryName -- tabularTagName -- searchText - -This filtering criteria is the same as the one for the getVersionFiles endpoint. diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md deleted file mode 100644 index db0e27e51c5..00000000000 --- a/doc/release-notes/9955-Signposting-updates.md +++ /dev/null @@ -1,7 +0,0 @@ -This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. - -To fix #9952, we surround the license info with `<` and `>`. - -To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information - -To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. diff --git a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md deleted file mode 100644 index 2ede679b361..00000000000 --- a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md +++ /dev/null @@ -1,9 +0,0 @@ -Added a new optional query parameter "mode" to the "getDownloadSize" API endpoint ("api/datasets/{identifier}/versions/{versionId}/downloadsize"). - -This parameter applies a filter criteria to the operation and supports the following values: - -- All (Default): Includes both archival and original sizes for tabular files - -- Archival: Includes only the archival size for tabular files - -- Original: Includes only the original size for tabular files diff --git a/doc/release-notes/9972-files-api-filter-by-tabular-tags.md b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md deleted file mode 100644 index 9c3fced1741..00000000000 --- a/doc/release-notes/9972-files-api-filter-by-tabular-tags.md +++ /dev/null @@ -1,3 +0,0 @@ -- New query parameter `tabularTagName` added to the getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) to return files to which the particular tabular tag has been added. - -- New endpoint to set tabular file tags via API: /api/files/{id}/metadata/tabularTags. diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md deleted file mode 100644 index 020224b2094..00000000000 --- a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md +++ /dev/null @@ -1,12 +0,0 @@ -Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/downloadsize), including the following new features: - -- The endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the file total download size. - - -- The endpoint now supports filtering by criteria. 
In particular, it accepts the following optional criteria query parameters: - - - contentType - - accessStatus - - categoryName - - tabularTagName - - searchText From b077d98a11e6957085757c54c48030ef33b50c30 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 4 Dec 2023 13:30:03 -0500 Subject: [PATCH 0373/1112] doc update, release note --- doc/release-notes/10162-globus-support.md | 14 ++++++++++++++ .../source/developers/big-data-support.rst | 7 +++++-- 2 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 doc/release-notes/10162-globus-support.md diff --git a/doc/release-notes/10162-globus-support.md b/doc/release-notes/10162-globus-support.md new file mode 100644 index 00000000000..d64e72b70a1 --- /dev/null +++ b/doc/release-notes/10162-globus-support.md @@ -0,0 +1,14 @@ +Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, and for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/latest/developers/big-data-support.html#big-data-support). +- Globus functionality remains 'experimental'/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incompatibilities. +- The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model +- Adding files to a dataset and accessing files are supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus) +- The functionality is also accessible via APIs (combining calls to the Dataverse and Globus APIs) + +Backward Incompatibilities: +- The configuration for use of a Globus S3 Connector has changed and is aligned with the standard store configuration mechanism +- The new functionality is incompatible with older versions of the dataverse-globus app; the Globus-related functionality in the UI will only function correctly if a Dataverse 6.1-compatible version of the dataverse-globus app is configured. + +New JVM Options: +- A new 'globus' store type and associated store-related options have been added. These are described in the [File Storage Options section of the Dataverse Guides](https://guides.dataverse.org/en/latest/installation/config.html#file-storage-using-a-local-filesystem-and-or-swift-and-or-object-stores-and-or-trusted-remote-stores). + +Obsolete Settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used. diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index d38f7f27a68..fe49f9f6150 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -152,8 +152,6 @@ Note: Globus file transfer is still experimental but feedback is welcome! See :r Users can transfer files via `Globus <https://www.globus.org>`_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) and a community-developed `dataverse-globus <https://github.com/scholarsportal/dataverse-globus>`_ app has been properly installed and configured.
-Due to differences in the access control models of a Dataverse installation and Globus, enabling the Globus capability on a store will disable the ability to restrict and embargo files in that store. - Globus endpoints can be in a variety of places, from data centers to personal computers. This means that from within the Dataverse software, a Globus transfer can feel like an upload or a download (with Globus Personal Connect running on your laptop, for example) or it can feel like a true transfer from one server to another (from a cluster in a data center into a Dataverse dataset or vice versa). @@ -162,11 +160,16 @@ Globus transfer uses an efficient transfer mechanism and has additional features * robust file transfer capable of restarting after network or endpoint failures * third-party transfer, which enables a user accessing a Dataverse installation in their desktop browser to initiate transfer of their files from a remote endpoint (i.e. on a local high-performance computing cluster), directly to an S3 store managed by the Dataverse installation +Note: Due to differences between the access control models of a Dataverse installation and Globus, and to the current Globus store model, Dataverse cannot enforce per-file access restrictions. +It is therefore recommended that a store be configured as public when Globus access is allowed; this disables the ability to restrict and embargo files in that store. + Dataverse supports three options for using Globus, two involving transfer to Dataverse-managed endpoints and one allowing Dataverse to reference files on remote endpoints. Dataverse-managed endpoints must be Globus 'guest collections' hosted on either a file-system-based endpoint or an S3-based endpoint (the latter requires use of the Globus S3 connector which requires a paid Globus subscription at the host institution). In either case, Dataverse is configured with the Globus credentials of a user account that can manage the endpoint. Users will need a Globus account, which can be obtained via their institution or directly from Globus (at no cost). +With the file-system endpoint, Dataverse does not currently have access to the file contents. Thus, functionality related to ingest, previews, fixity hash validation, etc. is not available. (Using the S3-based endpoint, Dataverse has access via S3 and all functionality normally associated with direct uploads to S3 is available.) + For the reference use case, Dataverse must be configured with a list of allowed endpoint/base paths from which files may be referenced. In this case, since Dataverse is not accessing the remote endpoint itself, it does not need Globus credentials. Users will need a Globus account in this case, and the remote endpoint must be configured to allow them access (i.e. be publicly readable, or potentially involving some out-of-band mechanism to request access (that could be described in the dataset's Terms of Use and Access).
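As a rough illustration of the per-store configuration model described above, a managed, file-system-based Globus store might be defined with JVM options along the following lines. This is a sketch only: the store id ``globusdemo``, the placeholder endpoint and path values, and the ``base-store`` option (pointing auxiliary files at an existing store) are assumptions made for this example. The ``type=globus`` store type comes from the release note above, and the ``managed``, ``transfer-endpoint-with-basepath``, and ``globus-token`` names mirror the constants added in ``GlobusAccessibleStore`` later in this patch series; the File Storage Options section of the guides remains the authoritative reference::

    -Ddataverse.files.globusdemo.type=globus
    -Ddataverse.files.globusdemo.label=GlobusDemo
    -Ddataverse.files.globusdemo.managed=true
    -Ddataverse.files.globusdemo.transfer-endpoint-with-basepath=<endpoint-id>/<base-path>
    -Ddataverse.files.globusdemo.globus-token=<base64-encoded client credentials>
    -Ddataverse.files.globusdemo.base-store=<id of an existing file or S3 store>

A remote (reference) Globus store would instead list the allowed endpoint/base paths (see REFERENCE_ENDPOINTS_WITH_BASEPATHS in the same interface) and needs no Globus credentials, since Dataverse never accesses the remote endpoint itself.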
From 547d71c342e08ebdf674d8754dc072465ad20651 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 4 Dec 2023 14:31:07 -0500 Subject: [PATCH 0374/1112] #9464 add more detail to validation error message --- .../edu/harvard/iq/dataverse/DataverseServiceBean.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index ed46caf65a1..027e58d9263 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -1072,7 +1072,12 @@ public String isDatasetJsonValid(String dataverseAlias, String jsonInput) { schema.validate(new JSONObject(jsonInput)); // throws a ValidationException if this object is invalid } catch (ValidationException vx) { logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + vx.getErrorMessage()); - return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + vx.getErrorMessage(); + String accumulatedexceptions = ""; + for (ValidationException va : vx.getCausingExceptions()){ + accumulatedexceptions = accumulatedexceptions + va; + accumulatedexceptions = accumulatedexceptions.replace("org.everit.json.schema.ValidationException:", " "); + } + return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + accumulatedexceptions; } catch (Exception ex) { logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage()); return BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage(); From fc3ae08ec9335ac857af4d9c112e892255ef1c7a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 4 Dec 2023 14:44:00 -0500 Subject: [PATCH 0375/1112] adding documentation --- .../edu/harvard/iq/dataverse/DatasetPage.java | 21 ++ .../harvard/iq/dataverse/api/Datasets.java | 238 +++++++++++------- 2 files changed, 163 insertions(+), 96 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 704c1d42228..f871d2e5198 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -6346,6 +6346,27 @@ public boolean isGlobusTransferRequested() { return globusTransferRequested; } + /** + * Analogous to the startDownload method, this method is called when the user + * tries to start a Globus transfer out (~download). The + * validateFilesForDownload call checks to see if there are some files that can + * be Globus transferred and, if so and there are no files that can't be + * transferred, this method will launch the globus transfer app. If there is a + * mix of files or if the guestbook popup is required, the method passes back to + * the UI so those popup(s) can be shown. Once they are, this method is called + * with the popupShown param true and the app will be shown. + * + * @param transferAll - when called from the dataset Access menu, this should be + * true so that all files are included in the processing. + * When it is called from the file table, the current + * selection is used and the param should be false. + * @param popupShown - This method is called twice if the mixed files or + * guestbook popups are needed. On the first call, popupShown + * is false so that the transfer is not started and those + * popups can be shown.
On the second call, popupShown is + * true and processing will occur as long as there are some + * valid files to transfer. + */ public void startGlobusTransfer(boolean transferAll, boolean popupShown) { if (transferAll) { this.setSelectedFiles(workingVersion.getFileMetadatas()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5961b428bcb..ae576134be3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3444,90 +3444,34 @@ public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam(" } - @POST - @AuthRequired - @Path("{id}/addGlobusFiles") - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, - @PathParam("id") String datasetId, - @FormDataParam("jsonData") String jsonData, - @Context UriInfo uriInfo - ) throws IOException, ExecutionException, InterruptedException { - - logger.info(" ==== (api addGlobusFilesToDataset) jsonData ====== " + jsonData); - - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); - } - - // ------------------------------------- - // (1) Get the user from the API key - // ------------------------------------- - AuthenticatedUser authUser; - try { - authUser = getRequestAuthenticatedUserOrDie(crc); - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") - ); - } - - // ------------------------------------- - // (2) Get the Dataset Id - // ------------------------------------- - Dataset dataset; - - try { - dataset = findDatasetOrDie(datasetId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - - JsonObject jsonObject = null; - try { - jsonObject = JsonUtil.getJsonObject(jsonData); - } catch (Exception ex) { - logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage()); - return badRequest("Error parsing json body"); - - } - - //------------------------------------ - // (2b) Make sure dataset does not have package file - // -------------------------------------- - - for (DatasetVersion dv : dataset.getVersions()) { - if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") - ); - } - } - - - String lockInfoMessage = "Globus Upload API started "; - DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, - (authUser).getId(), lockInfoMessage); - if (lock != null) { - dataset.addLock(lock); - } else { - logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - } - - - ApiToken token = authSvc.findApiTokenByUser(authUser); - - if(uriInfo != null) { - logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); - } - - - String requestUrl = SystemConfig.getDataverseSiteUrlStatic(); - - // Async Call - globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser); - - return ok("Async call to Globus Upload started "); - - } +/**************************** + * Globus Support Section: + * + * Globus transfer in (upload) and out (download) involve three basic steps: The + * app is launched and makes a callback to the + * globusUploadParameters/globusDownloadParameters method to get all of the info + * needed to set 
up its display. + * + * At some point after that, the user will make a selection as to which files to + * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload + * to indicate a transfer is about to start. In addition to providing the + * details of where to transfer the files to/from, Dataverse also grants the + * Globus principal involved the relevant rw or r permission for the dataset. + * + * Once the transfer is started, the app records the task id and sends it to + * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then + * monitors the transfer task and when it ultimately succeeds or fails, it + * revokes the principal's permission and, for the transfer-in case, adds the + * files to the dataset. (The dataset is locked until the transfer completes.) + * + * (If no transfer is started within a specified timeout, permissions will + * automatically be revoked - see the GlobusServiceBean for details.) + * + * The option to reference a file at a remote endpoint (rather than transfer it) + * follows the first two steps of the process above but completes with a call to + * the normal /addFiles endpoint (as there is no transfer to monitor and the + * files can be added to the dataset immediately.) + */ /** * Retrieve the parameters and signed URLs required to perform a globus @@ -3630,11 +3574,11 @@ public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @Pat } /** - * Requests permissions for a given globus user to upload to the dataset + * Provides specific storageIdentifiers to use for each file and requests permissions for a given globus user to upload to the dataset * * @param crc * @param datasetId - * @param jsonData + * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred. * @return * @throws IOException * @throws ExecutionException @@ -3721,15 +3665,114 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP } - /** - * Retrieve the parameters and signed URLs required to perform a globus - * transfer/download. This api endpoint is expected to be called as a signed - * callback after the globus-dataverse app/other app is launched, but it will - * accept other forms of authentication. + /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor * * @param crc * @param datasetId + * @param jsonData - see /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
+ * @param uriInfo + * @return + * @throws IOException + * @throws ExecutionException + * @throws InterruptedException */ + @POST + @AuthRequired + @Path("{id}/addGlobusFiles") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @FormDataParam("jsonData") String jsonData, + @Context UriInfo uriInfo + ) throws IOException, ExecutionException, InterruptedException { + + logger.info(" ==== (api addGlobusFilesToDataset) jsonData ====== " + jsonData); + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + AuthenticatedUser authUser; + try { + authUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + JsonObject jsonObject = null; + try { + jsonObject = JsonUtil.getJsonObject(jsonData); + } catch (Exception ex) { + logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage()); + return badRequest("Error parsing json body"); + + } + + //------------------------------------ + // (2b) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + + String lockInfoMessage = "Globus Upload API started "; + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, + (authUser).getId(), lockInfoMessage); + if (lock != null) { + dataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); + } + + + ApiToken token = authSvc.findApiTokenByUser(authUser); + + if(uriInfo != null) { + logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); + } + + + String requestUrl = SystemConfig.getDataverseSiteUrlStatic(); + + // Async Call + globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser); + + return ok("Async call to Globus Upload started "); + + } + +/** + * Retrieve the parameters and signed URLs required to perform a globus + * transfer/download. This api endpoint is expected to be called as a signed + * callback after the globus-dataverse app/other app is launched, but it will + * accept other forms of authentication. + * + * @param crc + * @param datasetId + * @param locale + * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL. + * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools. 
+ */ @GET @AuthRequired @Path("{id}/globusDownloadParameters") @@ -3815,12 +3858,14 @@ public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @P /** * Requests permissions for a given globus user to download the specified files - * the dataset + * the dataset and returns information about the paths to transfer from. + * + * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer. * * @param crc * @param datasetId - * @param jsonData - * @return + * @param jsonData - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required. + * @return - a JSON object containing a map of file ids to Globus endpoint/path * @throws IOException * @throws ExecutionException * @throws InterruptedException @@ -3957,11 +4002,12 @@ public Response requestGlobusDownload(@Context ContainerRequestContext crc, @Pat /** * Monitors a globus download and removes permissions on the dir/dataset when - * done + * the specified transfer task is completed. * * @param crc * @param datasetId - * @param jsonData + * @param jsonData - a JSON Object containing the key "taskIdentifier" with the + * Globus task to monitor. * @return * @throws IOException * @throws ExecutionException From 7697157ac98049dea45a2bd98193aad75e6037e1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 4 Dec 2023 15:27:21 -0500 Subject: [PATCH 0376/1112] #9464 handle single errors --- .../edu/harvard/iq/dataverse/DataverseServiceBean.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 027e58d9263..07e7fe615e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -1077,7 +1077,12 @@ public String isDatasetJsonValid(String dataverseAlias, String jsonInput) { accumulatedexceptions = accumulatedexceptions + va; accumulatedexceptions = accumulatedexceptions.replace("org.everit.json.schema.ValidationException:", " "); } - return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + accumulatedexceptions; + if (!accumulatedexceptions.isEmpty()){ + return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + accumulatedexceptions; + } else { + return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + vx.getErrorMessage(); + } + } catch (Exception ex) { logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage()); return BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage(); From 8ec61d084a81c7d5786bd583177b80255aa7e883 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 4 Dec 2023 15:58:21 -0500 Subject: [PATCH 0377/1112] cleanup, add method stubs, open for basestore, info->fine --- .../AbstractRemoteOverlayAccessIO.java | 12 +- .../dataaccess/GlobusAccessibleStore.java | 6 + .../dataaccess/GlobusOverlayAccessIO.java | 142 ++++++++++++------ 3 files changed, 112 insertions(+), 48 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java 
b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java index 16defc26a4f..8d058b7c9e3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java @@ -15,11 +15,8 @@ import javax.net.ssl.SSLContext; -import org.apache.http.Header; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpHead; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; @@ -30,15 +27,18 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.apache.http.protocol.HTTP; import org.apache.http.ssl.SSLContextBuilder; -import org.apache.http.util.EntityUtils; - import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; + +/** + * A base class for StorageIO implementations supporting remote access. At present, that includes the RemoteOverlayAccessIO store and the newer GlobusOverlayAccessIO store. It primarily includes + * common methods for handling auxiliary files in the configured base store. + * @param + */ public abstract class AbstractRemoteOverlayAccessIO extends StorageIO { protected static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java index d827e40e807..e4d062f0619 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java @@ -6,7 +6,13 @@ public interface GlobusAccessibleStore { + //Whether Dataverse manages access controls for the Globus endpoint or not. 
static final String MANAGED = "managed"; + /* + * transfer and reference endpoint formats: + * + * REFERENCE_ENDPOINTS_WITH_BASEPATHS - reference endpoints separated by a comma + */ static final String TRANSFER_ENDPOINT_WITH_BASEPATH = "transfer-endpoint-with-basepath"; static final String GLOBUS_TOKEN = "globus-token"; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index e825af8cf30..7a6809cb2ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -2,12 +2,15 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URI; @@ -16,6 +19,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; +import java.util.List; import java.util.logging.Logger; import org.apache.http.client.ClientProtocolException; @@ -32,9 +36,18 @@ /** * @author qqmyers - */ + * + * This class implements three related use cases, all of which leverage the underlying idea of using a base store (as with the Https RemoteOverlay store): + * Managed - where Dataverse has control of the specified Globus endpoint and can set/remove permissions as needed to allow file transfers in/out: + * File/generic endpoint - assumes Dataverse does not have access to the datafile contents + * S3-Connector endpoint - assumes the datafiles are accessible via Globus and via S3 such that Dataverse can access to the datafile contents when needed. + * Remote - where Dataverse references files that remain at remote Globus endpoints (as with the Https RemoteOverlay store) and cannot access to the datafile contents. + * + * Note that Globus endpoints can provide Http URLs to get file contents, so a future enhancement could potentially support datafile contents access in the Managed/File and Remote cases. + * + * */ /* - * Globus Overlay Driver + * Globus Overlay Driver storageIdentifer format: * * Remote: StorageIdentifier format: * ://// @@ -47,11 +60,6 @@ * * Storage location: * /// - * - * transfer and reference endpoint formats: - * - * - * reference endpoints separated by a comma * */ public class GlobusOverlayAccessIO extends AbstractRemoteOverlayAccessIO implements GlobusAccessibleStore { @@ -115,7 +123,6 @@ private String retrieveGlobusAccessToken() { return accessToken.getOtherTokens().get(0).getAccessToken(); } - private void parsePath() { int filenameStart = path.lastIndexOf("/") + 1; String endpointWithBasePath = null; @@ -126,9 +133,9 @@ private void parsePath() { } //String endpointWithBasePath = baseEndpointPath.substring(baseEndpointPath.lastIndexOf(DataAccess.SEPARATOR) + 3); int pathStart = endpointWithBasePath.indexOf("/"); - logger.info("endpointWithBasePath: " + endpointWithBasePath); + logger.fine("endpointWithBasePath: " + endpointWithBasePath); endpointPath = "/" + (pathStart > 0 ? 
endpointWithBasePath.substring(pathStart + 1) : ""); - logger.info("endpointPath: " + endpointPath); + logger.fine("endpointPath: " + endpointPath); if (isManaged() && (dvObject!=null)) { @@ -146,7 +153,7 @@ private void parsePath() { if (filenameStart > 0) { relativeDirectoryPath = relativeDirectoryPath + path.substring(0, filenameStart); } - logger.info("relativeDirectoryPath finally: " + relativeDirectoryPath); + logger.fine("relativeDirectoryPath finally: " + relativeDirectoryPath); filename = path.substring(filenameStart); endpoint = pathStart > 0 ? endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath; @@ -171,7 +178,7 @@ protected void validatePath(String relPath) throws IOException { } else { try { String endpoint = findMatchingEndpoint(relPath, allowedEndpoints); - logger.info(endpoint + " " + relPath); + logger.fine(endpoint + " " + relPath); if (endpoint == null || !Paths.get(endpoint, relPath).normalize().startsWith(endpoint)) { throw new IOException( @@ -189,7 +196,6 @@ protected void validatePath(String relPath) throws IOException { public long retrieveSizeFromMedia() { parsePath(); String globusAccessToken = retrieveGlobusAccessToken(); - logger.info("GAT2: " + globusAccessToken); // Construct Globus URL URI absoluteURI = null; try { @@ -198,13 +204,12 @@ public long retrieveSizeFromMedia() { + "/ls?path=" + endpointPath + relativeDirectoryPath + "&filter=name:" + filename); HttpGet get = new HttpGet(absoluteURI); - logger.info("Token is " + globusAccessToken); get.addHeader("Authorization", "Bearer " + globusAccessToken); CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); if (response.getStatusLine().getStatusCode() == 200) { // Get reponse as string String responseString = EntityUtils.toString(response.getEntity()); - logger.info("Response from " + get.getURI().toString() + " is: " + responseString); + logger.fine("Response from " + get.getURI().toString() + " is: " + responseString); JsonObject responseJson = JsonUtil.getJsonObject(responseString); JsonArray dataArray = responseJson.getJsonArray("DATA"); if (dataArray != null && dataArray.size() != 0) { @@ -214,7 +219,7 @@ public long retrieveSizeFromMedia() { } else { logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); - logger.info(EntityUtils.toString(response.getEntity())); + logger.fine(EntityUtils.toString(response.getEntity())); } } catch (URISyntaxException e) { // Should have been caught in validatePath @@ -258,16 +263,15 @@ public void delete() throws IOException { absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/submission_id"); HttpGet get = new HttpGet(absoluteURI); - logger.info("Token is " + globusAccessToken); get.addHeader("Authorization", "Bearer " + globusAccessToken); CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); if (response.getStatusLine().getStatusCode() == 200) { // Get reponse as string String responseString = EntityUtils.toString(response.getEntity()); - logger.info("Response from " + get.getURI().toString() + " is: " + responseString); + logger.fine("Response from " + get.getURI().toString() + " is: " + responseString); JsonObject responseJson = JsonUtil.getJsonObject(responseString); String submissionId = responseJson.getString("value"); - logger.info("submission_id for delete is: " + submissionId); + logger.fine("submission_id for delete is: " + submissionId); absoluteURI = new 
URI("https://transfer.api.globusonline.org/v0.10/delete"); HttpPost post = new HttpPost(absoluteURI); JsonObjectBuilder taskJsonBuilder = Json.createObjectBuilder(); @@ -277,30 +281,30 @@ public void delete() throws IOException { post.setHeader("Content-Type", "application/json"); post.addHeader("Authorization", "Bearer " + globusAccessToken); String taskJson= JsonUtil.prettyPrint(taskJsonBuilder.build()); - logger.info("Sending: " + taskJson); + logger.fine("Sending: " + taskJson); post.setEntity(new StringEntity(taskJson, "utf-8")); CloseableHttpResponse postResponse = getSharedHttpClient().execute(post, localContext); int statusCode=postResponse.getStatusLine().getStatusCode(); - logger.info("Response :" + statusCode + ": " +postResponse.getStatusLine().getReasonPhrase()); + logger.fine("Response :" + statusCode + ": " +postResponse.getStatusLine().getReasonPhrase()); switch (statusCode) { case 202: // ~Success - delete task was accepted - logger.info("Globus delete initiated: " + EntityUtils.toString(postResponse.getEntity())); + logger.fine("Globus delete initiated: " + EntityUtils.toString(postResponse.getEntity())); break; case 200: // Duplicate - delete task was already accepted - logger.info("Duplicate Globus delete: " + EntityUtils.toString(postResponse.getEntity())); + logger.warning("Duplicate Globus delete: " + EntityUtils.toString(postResponse.getEntity())); break; default: logger.warning("Response from " + post.getURI().toString() + " was " + postResponse.getStatusLine().getStatusCode()); - logger.info(EntityUtils.toString(postResponse.getEntity())); + logger.fine(EntityUtils.toString(postResponse.getEntity())); } } else { logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); - logger.info(EntityUtils.toString(response.getEntity())); + logger.fine(EntityUtils.toString(response.getEntity())); } } catch (Exception e) { logger.warning(e.getMessage()); @@ -383,7 +387,7 @@ public String getStorageLocation() throws IOException { */ protected void configureGlobusEndpoints() throws IOException { allowedEndpoints = getAllowedEndpoints(this.driverId); - logger.info("Set allowed endpoints: " + Arrays.toString(allowedEndpoints)); + logger.fine("Set allowed endpoints: " + Arrays.toString(allowedEndpoints)); } private static String[] getAllowedEndpoints(String driverId) throws IOException { @@ -409,37 +413,91 @@ private static String[] getAllowedEndpoints(String driverId) throws IOException @Override - public void open(DataAccessOption... option) throws IOException { - // TODO Auto-generated method stub - - } + public void open(DataAccessOption... 
options) throws IOException { + + baseStore.open(options); + + DataAccessRequest req = this.getRequest(); + + if (isWriteAccessRequested(options)) { + isWriteAccess = true; + isReadAccess = false; + } else { + isWriteAccess = false; + isReadAccess = true; + } + + if (dvObject instanceof DataFile) { + String storageIdentifier = dvObject.getStorageIdentifier(); + + DataFile dataFile = this.getDataFile(); + + if (req != null && req.getParameter("noVarHeader") != null) { + baseStore.setNoVarHeader(true); + } + + if (storageIdentifier == null || "".equals(storageIdentifier)) { + throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile."); + } + + logger.fine("StorageIdentifier is: " + storageIdentifier); + + if (isReadAccess) { + if (dataFile.getFilesize() >= 0) { + this.setSize(dataFile.getFilesize()); + } else { + logger.fine("Setting size"); + this.setSize(retrieveSizeFromMedia()); + } + // Only applies for the S3 Connector case (where we could have run an ingest) + if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + + List datavariables = dataFile.getDataTable().getDataVariables(); + String varHeaderLine = generateVariableHeader(datavariables); + this.setVarHeader(varHeaderLine); + } + + } + this.setMimeType(dataFile.getContentType()); + + try { + this.setFileName(dataFile.getFileMetadata().getLabel()); + } catch (Exception ex) { + this.setFileName("unknown"); + } + } else if (dvObject instanceof Dataset) { + throw new IOException( + "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataverse yet"); + } else if (dvObject instanceof Dataverse) { + throw new IOException( + "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataverse yet"); + } + } @Override public Path getFileSystemPath() throws IOException { - // TODO Auto-generated method stub - return null; + throw new UnsupportedDataAccessOperationException( + this.getClass().getName() + ": savePath() not implemented in this storage driver."); } - @Override public void savePath(Path fileSystemPath) throws IOException { - // TODO Auto-generated method stub - + throw new UnsupportedDataAccessOperationException( + this.getClass().getName() + ": savePath() not implemented in this storage driver."); } - @Override public void saveInputStream(InputStream inputStream) throws IOException { - // TODO Auto-generated method stub - + throw new UnsupportedDataAccessOperationException( + this.getClass().getName() + ": savePath() not implemented in this storage driver."); } - @Override public void saveInputStream(InputStream inputStream, Long filesize) throws IOException { - // TODO Auto-generated method stub - + throw new UnsupportedDataAccessOperationException( + this.getClass().getName() + ": savePath() not implemented in this storage driver."); } - + } From 38c120e13d2e1276324b903be58306520168b577 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 4 Dec 2023 18:21:41 -0500 Subject: [PATCH 0378/1112] cleanup, delete obsolete methods, change to private, info->fine --- .../harvard/iq/dataverse/api/Datasets.java | 4 +- .../dataverse/globus/GlobusServiceBean.java | 461 +++--------------- .../iq/dataverse/settings/JvmSettings.java | 2 +- src/main/webapp/globus.xhtml | 30 -- 4 files changed, 78 insertions(+), 419 deletions(-) delete mode 100644 src/main/webapp/globus.xhtml diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ae576134be3..cb57acd3b86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3543,7 +3543,7 @@ public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @Pat } else { params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths); } - int timeoutSeconds = JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class); + int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class); JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder(); String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths"; allowedApiCalls.add( @@ -3833,7 +3833,7 @@ public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @P params.add("endpoint", transferEndpoint); } params.add("files", files); - int timeoutSeconds = JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class); + int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class); JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder(); allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload") .add(URLTokenUtil.HTTP_METHOD, "POST") diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 0c991424ce9..37959188857 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -97,34 +97,6 @@ public class GlobusServiceBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(GlobusServiceBean.class.getCanonicalName()); private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private String code; - private String userTransferToken; - private String state; - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public String getCode() { - return code; - } - - public void setCode(String code) { - this.code = code; - } - - public String getUserTransferToken() { - return userTransferToken; - } - - public void setUserTransferToken(String userTransferToken) { - this.userTransferToken = userTransferToken; - } - private String getRuleId(GlobusEndpoint endpoint, String principal, String permissions) throws MalformedURLException { @@ -152,33 +124,6 @@ private String getRuleId(GlobusEndpoint endpoint, String principal, String permi return null; } - /* - * public void updatePermision(AccessToken clientTokenUser, String directory, - * String principalType, String perm) throws MalformedURLException { if - * (directory != null && !directory.equals("")) { directory = directory + "/"; } - * logger.info("Start updating permissions." 
+ " Directory is " + directory); - * String globusEndpoint = - * settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - * ArrayList rules = checkPermisions(clientTokenUser, directory, - * globusEndpoint, principalType, null); logger.info("Size of rules " + - * rules.size()); int count = 0; while (count < rules.size()) { - * logger.info("Start removing rules " + rules.get(count)); Permissions - * permissions = new Permissions(); permissions.setDATA_TYPE("access"); - * permissions.setPermissions(perm); permissions.setPath(directory); - * - * Gson gson = new GsonBuilder().create(); URL url = new - * URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint - * + "/access/" + rules.get(count)); - * logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + - * globusEndpoint + "/access/" + rules.get(count)); MakeRequestResponse result = - * makeRequest(url, "Bearer", - * clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT", - * gson.toJson(permissions)); if (result.status != 200) { - * logger.warning("Cannot update access rule " + rules.get(count)); } else { - * logger.info("Access rule " + rules.get(count) + " was updated"); } count++; } - * } - */ - /** * Call to delete a globus rule related to the specified dataset. * @@ -214,6 +159,13 @@ public void deletePermission(String ruleId, Dataset dataset, Logger globusLogger } } + /** Request read/write access for the specified principal and generate a list of accessible paths for new files for the specified dataset. + * + * @param principal - the id of the Globus principal doing the transfer + * @param dataset + * @param numberOfPaths - how many files are to be transferred + * @return + */ public JsonObject requestAccessiblePaths(String principal, Dataset dataset, int numberOfPaths) { GlobusEndpoint endpoint = getGlobusEndpoint(dataset); @@ -278,6 +230,12 @@ private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissi } } + /** Given an array of remote files to be referenced in the dataset, create a set of valid storage identifiers and return a map of the remote file paths to storage identifiers. + * + * @param dataset + * @param referencedFiles - a JSON array of remote files to be referenced in the dataset - each should be a string with the /path/to/file + * @return - a map of supplied paths to valid storage identifiers + */ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray referencedFiles) { String driverId = dataset.getEffectiveStorageDriverId(); JsonArray endpoints = GlobusAccessibleStore.getReferenceEndpointsWithPaths(driverId); @@ -304,39 +262,38 @@ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray ref return fileMap.build(); } + + /** A cache of temporary permission requests - for upload (rw) and download (r) access. + * When a temporary permission request is created, it is added to the cache. After GLOBUS_CACHE_MAXAGE minutes, if a transfer has not been started, the permission will be revoked/deleted. + * (If a transfer has been started, the permission will not be revoked/deleted until the transfer is complete. This is handled in other methods.) 
+ */ // Single cache of open rules/permission requests private final Cache rulesCache = Caffeine.newBuilder() .expireAfterWrite( - Duration.of(JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) + Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) .scheduler(Scheduler.systemScheduler()).evictionListener((ruleId, datasetId, cause) -> { // Delete rules that expire - logger.info("Rule " + ruleId + " expired"); + logger.fine("Rule " + ruleId + " expired"); Dataset dataset = datasetSvc.find(datasetId); deletePermission((String) ruleId, dataset, logger); }) .build(); + //Convenience method to add a temporary permission request to the cache - allows logging of temporary permission requests private void monitorTemporaryPermissions(String ruleId, long datasetId) { - logger.info("Adding rule " + ruleId + " for dataset " + datasetId); + logger.fine("Adding rule " + ruleId + " for dataset " + datasetId); rulesCache.put(ruleId, datasetId); } - public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId) throws MalformedURLException { - - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId - + "/successful_transfers"); - - MakeRequestResponse result = makeRequest(url, "Bearer", - clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); - - if (result.status == 200) { - logger.info(" SUCCESS ====== "); - return true; - } - return false; - } - +/** Call the Globus API to get info about the transfer. + * + * @param accessToken + * @param taskId - the Globus task id supplied by the user + * @param globusLogger - the transaction-specific logger to use (separate log files are created in general, some calls may use the class logger) + * @return + * @throws MalformedURLException + */ public GlobusTask getTask(String accessToken, String taskId, Logger globusLogger) throws MalformedURLException { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId); @@ -356,6 +313,11 @@ public GlobusTask getTask(String accessToken, String taskId, Logger globusLogger return task; } + /** Globus call to get an access token for the user using the long-term token we hold. 
+ * + * @param globusBasicToken - the base64 encoded Globus Basic token comprised of the : + * @return - a valid Globus access token + */ public static AccessToken getClientToken(String globusBasicToken) { URL url; AccessToken clientTokenUser = null; @@ -375,36 +337,7 @@ public static AccessToken getClientToken(String globusBasicToken) { return clientTokenUser; } - public AccessToken getAccessToken(HttpServletRequest origRequest, String globusBasicToken) - throws UnsupportedEncodingException, MalformedURLException { - String serverName = origRequest.getServerName(); - if (serverName.equals("localhost")) { - logger.severe("Changing localhost to utoronto"); - serverName = "utl-192-123.library.utoronto.ca"; - } - - String redirectURL = "https://" + serverName + "/globus.xhtml"; - - redirectURL = URLEncoder.encode(redirectURL, "UTF-8"); - - URL url = new URL("https://auth.globus.org/v2/oauth2/token?code=" + code + "&redirect_uri=" + redirectURL - + "&grant_type=authorization_code"); - logger.info(url.toString()); - - MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null); - AccessToken accessTokenUser = null; - - if (result.status == 200) { - logger.info("Access Token: \n" + result.toString()); - accessTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); - logger.info(accessTokenUser.getAccessToken()); - } - - return accessTokenUser; - - } - - public static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, + private static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) { String str = null; HttpURLConnection connection = null; @@ -412,9 +345,8 @@ public static MakeRequestResponse makeRequest(URL url, String authType, String a try { connection = (HttpURLConnection) url.openConnection(); // Basic - // NThjMGYxNDQtN2QzMy00ZTYzLTk3MmUtMjljNjY5YzJjNGJiOktzSUVDMDZtTUxlRHNKTDBsTmRibXBIbjZvaWpQNGkwWVVuRmQyVDZRSnc9 logger.info(authType + " " + authCode); - logger.info("For URL: " + url.toString()); + logger.fine("For URL: " + url.toString()); connection.setRequestProperty("Authorization", authType + " " + authCode); // connection.setRequestProperty("Content-Type", // "application/x-www-form-urlencoded"); @@ -422,7 +354,7 @@ public static MakeRequestResponse makeRequest(URL url, String authType, String a if (jsonString != null) { connection.setRequestProperty("Content-Type", "application/json"); connection.setRequestProperty("Accept", "application/json"); - logger.info(jsonString); + logger.fine(jsonString); connection.setDoOutput(true); OutputStreamWriter wr = new OutputStreamWriter(connection.getOutputStream()); @@ -431,24 +363,21 @@ public static MakeRequestResponse makeRequest(URL url, String authType, String a } status = connection.getResponseCode(); - logger.info("Status now " + status); + logger.fine("Status now " + status); InputStream result = connection.getInputStream(); if (result != null) { - logger.info("Result is not null"); str = readResultJson(result).toString(); - logger.info("str is "); - logger.info(result.toString()); + logger.fine("str is " + result.toString()); } else { - logger.info("Result is null"); + logger.fine("Result is null"); str = null; } - logger.info("status: " + status); + logger.fine("status: " + status); } catch (IOException ex) { - logger.info("IO"); logger.severe(ex.getMessage()); - logger.info(ex.getCause().toString()); - logger.info(ex.getStackTrace().toString()); + logger.fine(ex.getCause().toString()); + 
logger.fine(ex.getStackTrace().toString()); } finally { if (connection != null) { connection.disconnect(); @@ -461,16 +390,14 @@ public static MakeRequestResponse makeRequest(URL url, String authType, String a private static StringBuilder readResultJson(InputStream in) { StringBuilder sb = null; - try { - - BufferedReader br = new BufferedReader(new InputStreamReader(in)); + try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) { sb = new StringBuilder(); String line; while ((line = br.readLine()) != null) { sb.append(line + "\n"); } br.close(); - logger.info(sb.toString()); + logger.fine(sb.toString()); } catch (IOException e) { sb = null; logger.severe(e.getMessage()); @@ -495,31 +422,6 @@ private static T parseJson(String sb, Class jsonParserClass, boolean nami } } - public String getDirectory(String datasetId) { - Dataset dataset = null; - String directory = null; - try { - dataset = datasetSvc.find(Long.parseLong(datasetId)); - if (dataset == null) { - logger.severe("Dataset not found " + datasetId); - return null; - } - String storeId = dataset.getStorageIdentifier(); - storeId.substring(storeId.indexOf("//") + 1); - directory = storeId.substring(storeId.indexOf("//") + 1); - logger.info(storeId); - logger.info(directory); - logger.info("Storage identifier:" + dataset.getIdentifierForFileStorage()); - return directory; - - } catch (NumberFormatException nfe) { - logger.severe(nfe.getMessage()); - - return null; - } - - } - static class MakeRequestResponse { public String jsonResponse; public int status; @@ -531,53 +433,26 @@ static class MakeRequestResponse { } - /* - * unused - may be needed for S3 case private MakeRequestResponse - * findDirectory(String directory, String clientToken, String globusEndpoint) - * throws MalformedURLException { URL url = new - * URL(" https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint - * + "/ls?path=" + directory + "/"); - * - * MakeRequestResponse result = makeRequest(url, "Bearer", clientToken, "GET", - * null); logger.info("find directory status:" + result.status); - * - * return result; } - */ - /* - * public boolean giveGlobusPublicPermissions(Dataset dataset) throws - * UnsupportedEncodingException, MalformedURLException { - * - * GlobusEndpoint endpoint = getGlobusEndpoint(dataset); - * - * - * MakeRequestResponse status = findDirectory(endpoint.getBasePath(), - * endpoint.getClientToken(), endpoint.getId()); - * - * if (status.status == 200) { - * - * int perStatus = givePermission("all_authenticated_users", "", "r", dataset); - * logger.info("givePermission status " + perStatus); if (perStatus == 409) { - * logger.info("Permissions already exist or limit was reached"); } else if - * (perStatus == 400) { logger.info("No directory in Globus"); } else if - * (perStatus != 201 && perStatus != 200) { - * logger.info("Cannot give read permission"); return false; } + /** + * Cache of open download Requests This cache keeps track of the set of files + * selected for transfer out (download) via Globus. It is a means of + * transferring the list from the DatasetPage, where it is generated via user UI + * actions, and the Datasets/globusDownloadParameters API. * - * } else if (status.status == 404) { - * logger.info("There is no globus directory"); } else { - * logger.severe("Cannot find directory in globus, status " + status); return - * false; } + * Nominally, the dataverse-globus app will call that API endpoint and then + * /requestGlobusDownload, at which point the cached info is sent to the app. 
If + * the app doesn't call within 5 minutes (the time allowed to call + * /globusDownloadParameters) + GLOBUS_CACHE_MAXAGE minutes (a ~longer period + * giving the user time to make choices in the app), the cached info is deleted. * - * return true; } */ - - // Single cache of open rules/permission requests private final Cache downloadCache = Caffeine.newBuilder() .expireAfterWrite( - Duration.of(JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class) + 5, ChronoUnit.MINUTES)) + Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class) + 5, ChronoUnit.MINUTES)) .scheduler(Scheduler.systemScheduler()).evictionListener((downloadId, datasetId, cause) -> { // Delete downloads that expire - logger.info("Download for " + downloadId + " expired"); + logger.fine("Download for " + downloadId + " expired"); }) .build(); @@ -600,11 +475,18 @@ public int setPermissionForDownload(Dataset dataset, String principal) { return requestPermission(endpoint, dataset, permissions); } - // Generates the URL to launch the Globus app + // Generates the URL to launch the Globus app for upload public String getGlobusAppUrlForDataset(Dataset d) { return getGlobusAppUrlForDataset(d, true, null); } + /** Generated the App URl for upload (in) or download (out) + * + * @param d - the dataset involved + * @param upload - boolean, true for upload, false for download + * @param dataFiles - a list of the DataFiles to be downloaded + * @return + */ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List dataFiles) { String localeCode = session.getLocaleCode(); ApiToken apiToken = null; @@ -654,7 +536,7 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List dataFiles, Dataset d) { return filesBuilder.build(); } - public String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken, List downloadDFList) { + private String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken, List downloadDFList) { return URLTokenUtil.getScriptForUrl(getGlobusAppUrlForDataset(dataset, false, downloadDFList)); } @@ -718,7 +600,7 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S GlobusEndpoint endpoint = getGlobusEndpoint(dataset); GlobusTask task = getTask(endpoint.getClientToken(), taskIdentifier, globusLogger); String ruleId = getRuleId(endpoint, task.getOwner_id(), "rw"); - logger.info("Found rule: " + ruleId); + logger.fine("Found rule: " + ruleId); if (ruleId != null) { Long datasetId = rulesCache.getIfPresent(ruleId); if (datasetId != null) { @@ -812,8 +694,8 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, globusLogger); JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); - logger.info("Size: " + newfilesJsonArray.size()); - logger.info("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); + logger.fine("Size: " + newfilesJsonArray.size()); + logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder(); for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { @@ -1227,198 +1109,8 @@ public String calculatemime(String fileName) throws InterruptedException { return finalType; } - /* - * public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) - * throws MalformedURLException { - * - * logger.info("=====Tasklist == dataset id :" + 
dataset.getId()); String - * directory = null; - * - * try { - * - * List fileMetadatas = new ArrayList<>(); - * - * StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - * - * - * - * DatasetVersion workingVersion = dataset.getEditVersion(); - * - * if (workingVersion.getCreateTime() != null) { - * workingVersion.setCreateTime(new Timestamp(new Date().getTime())); } - * - * directory = dataset.getAuthorityForFileStorage() + "/" + - * dataset.getIdentifierForFileStorage(); - * - * System.out.println("======= directory ==== " + directory + - * " ==== datasetId :" + dataset.getId()); Map checksumMapOld - * = new HashMap<>(); - * - * Iterator fmIt = workingVersion.getFileMetadatas().iterator(); - * - * while (fmIt.hasNext()) { FileMetadata fm = fmIt.next(); if (fm.getDataFile() - * != null && fm.getDataFile().getId() != null) { String chksum = - * fm.getDataFile().getChecksumValue(); if (chksum != null) { - * checksumMapOld.put(chksum, 1); } } } - * - * List dFileList = new ArrayList<>(); boolean update = false; for - * (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { - * - * String s3ObjectKey = s3ObjectSummary.getKey(); - * - * - * String t = s3ObjectKey.replace(directory, ""); - * - * if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String - * filePath = s3ObjectKey; String fileName = - * filePath.split("/")[filePath.split("/").length - 1]; String fullPath = - * datasetSIO.getStorageLocation() + "/" + fileName; - * - * logger.info("Full path " + fullPath); StorageIO dataFileStorageIO = - * DataAccess.getDirectStorageIO(fullPath); InputStream in = - * dataFileStorageIO.getInputStream(); - * - * String checksumVal = FileUtil.calculateChecksum(in, - * DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); - * logger.info("The checksum is " + checksumVal); if - * ((checksumMapOld.get(checksumVal) != null)) { logger.info("datasetId :" + - * dataset.getId() + "======= filename ==== " + filePath + - * " == file already exists "); } else if (filePath.contains("cached") || - * filePath.contains(".thumb")) { logger.info(filePath + " is ignored"); } else - * { update = true; logger.info("datasetId :" + dataset.getId() + - * "======= filename ==== " + filePath + " == new file "); try { - * - * DataFile datafile = new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE); - * //MIME_TYPE_GLOBUS datafile.setModificationTime(new Timestamp(new - * Date().getTime())); datafile.setCreateDate(new Timestamp(new - * Date().getTime())); datafile.setPermissionModificationTime(new Timestamp(new - * Date().getTime())); - * - * FileMetadata fmd = new FileMetadata(); - * - * - * fmd.setLabel(fileName); fmd.setDirectoryLabel(filePath.replace(directory, - * "").replace(File.separator + fileName, "")); - * - * fmd.setDataFile(datafile); - * - * datafile.getFileMetadatas().add(fmd); - * - * FileUtil.generateS3PackageStorageIdentifierForGlobus(datafile); - * logger.info("==== datasetId :" + dataset.getId() + "======= filename ==== " - * + filePath + " == added to datafile, filemetadata "); - * - * try { // We persist "SHA1" rather than "SHA-1". 
- * //datafile.setChecksumType(DataFile.ChecksumType.SHA1); - * datafile.setChecksumType(DataFile.ChecksumType.MD5); - * datafile.setChecksumValue(checksumVal); } catch (Exception cksumEx) { - * logger.info("==== datasetId :" + dataset.getId() + - * "======Could not calculate checksumType signature for the new file "); } - * - * datafile.setFilesize(totalSize); - * - * dFileList.add(datafile); - * - * } catch (Exception ioex) { logger.info("datasetId :" + dataset.getId() + - * "======Failed to process and/or save the file " + ioex.getMessage()); return - * false; - * - * } } } } if (update) { - * - * List filesAdded = new ArrayList<>(); - * - * if (dFileList != null && dFileList.size() > 0) { - * - * // Dataset dataset = version.getDataset(); - * - * for (DataFile dataFile : dFileList) { - * - * if (dataFile.getOwner() == null) { dataFile.setOwner(dataset); - * - * workingVersion.getFileMetadatas().add(dataFile.getFileMetadata()); - * dataFile.getFileMetadata().setDatasetVersion(workingVersion); - * dataset.getFiles().add(dataFile); - * - * } - * - * filesAdded.add(dataFile); - * - * } - * - * logger.info("==== datasetId :" + dataset.getId() + - * " ===== Done! Finished saving new files to the dataset."); } - * - * fileMetadatas.clear(); for (DataFile addedFile : filesAdded) { - * fileMetadatas.add(addedFile.getFileMetadata()); } filesAdded = null; - * - * if (workingVersion.isDraft()) { - * - * logger.info("Async: ==== datasetId :" + dataset.getId() + - * " ==== inside draft version "); - * - * Timestamp updateTime = new Timestamp(new Date().getTime()); - * - * workingVersion.setLastUpdateTime(updateTime); - * dataset.setModificationTime(updateTime); - * - * - * for (FileMetadata fileMetadata : fileMetadatas) { - * - * if (fileMetadata.getDataFile().getCreateDate() == null) { - * fileMetadata.getDataFile().setCreateDate(updateTime); - * fileMetadata.getDataFile().setCreator((AuthenticatedUser) user); } - * fileMetadata.getDataFile().setModificationTime(updateTime); } - * - * - * } else { logger.info("datasetId :" + dataset.getId() + - * " ==== inside released version "); - * - * for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { for - * (FileMetadata fileMetadata : fileMetadatas) { if - * (fileMetadata.getDataFile().getStorageIdentifier() != null) { - * - * if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion. 
- * getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) { - * workingVersion.getFileMetadatas().set(i, fileMetadata); } } } } - * - * - * } - * - * - * try { Command cmd; logger.info("Async: ==== datasetId :" + - * dataset.getId() + - * " ======= UpdateDatasetVersionCommand START in globus function "); cmd = new - * UpdateDatasetVersionCommand(dataset, new DataverseRequest(user, - * (HttpServletRequest) null)); ((UpdateDatasetVersionCommand) - * cmd).setValidateLenient(true); //new DataverseRequest(authenticatedUser, - * (HttpServletRequest) null) //dvRequestService.getDataverseRequest() - * commandEngine.submit(cmd); } catch (CommandException ex) { - * logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + - * "======CommandException updating DatasetVersion from batch job: " + - * ex.getMessage()); return false; } - * - * logger.info("==== datasetId :" + dataset.getId() + - * " ======= GLOBUS CALL COMPLETED SUCCESSFULLY "); - * - * //return true; } - * - * } catch (Exception e) { String message = e.getMessage(); - * - * logger.info("==== datasetId :" + dataset.getId() + - * " ======= GLOBUS CALL Exception ============== " + message); - * e.printStackTrace(); return false; //return - * error(Response.Status.INTERNAL_SERVER_ERROR, - * "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" - * + message + "'."); } - * - * String globusBasicToken = - * settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""); - * AccessToken clientTokenUser = getClientToken(globusBasicToken); - * updatePermision(clientTokenUser, directory, "identity", "r"); return true; } - * - */ - GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { + private GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { Dataset dataset = null; if (dvObject instanceof Dataset) { dataset = (Dataset) dvObject; @@ -1435,8 +1127,6 @@ GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { if (GlobusAccessibleStore.isDataverseManaged(driverId) && (dataset != null)) { directoryPath = directoryPath + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); - logger.info("directoryPath now: " + directoryPath); - } else { // remote store - may have path in file storageidentifier String relPath = dvObject.getStorageIdentifier() @@ -1446,17 +1136,16 @@ GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { directoryPath = directoryPath + relPath.substring(0, filenameStart); } } - logger.info("directoryPath finally: " + directoryPath); + logger.fine("directoryPath finally: " + directoryPath); String endpointId = GlobusAccessibleStore.getTransferEndpointId(driverId); - logger.info("endpointId: " + endpointId); + logger.fine("endpointId: " + endpointId); String globusToken = GlobusAccessibleStore.getGlobusToken(driverId); AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken); String clientToken = accessToken.getOtherTokens().get(0).getAccessToken(); - logger.info("clientToken: " + clientToken); endpoint = new GlobusEndpoint(endpointId, clientToken, directoryPath); return endpoint; @@ -1484,7 +1173,7 @@ public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, DataFile df = guestbookResponse.getDataFile(); if (df != null) { - logger.info("Single datafile case for writeGuestbookAndStartTransfer"); + logger.fine("Single datafile case for writeGuestbookAndStartTransfer"); List downloadDFList = new ArrayList(1); downloadDFList.add(df); if 
(!doNotSaveGuestbookResponse) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c9038047611..96a56d09c0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -50,7 +50,7 @@ public enum JvmSettings { UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"), - GLOBUS_RULES_CACHE_MAXAGE(SCOPE_FILES, "globus-rules-cache-maxage"), + GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-rules-cache-maxage"), FILES(SCOPE_FILES), BASE_URL(FILES, "base-url"), GLOBUS_TOKEN(FILES, "globus-token"), diff --git a/src/main/webapp/globus.xhtml b/src/main/webapp/globus.xhtml deleted file mode 100644 index f4eebd4babf..00000000000 --- a/src/main/webapp/globus.xhtml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - From caa6e684390bb4c36dff45f1de94837f8b632f57 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 4 Dec 2023 18:29:22 -0500 Subject: [PATCH 0379/1112] revert unrelated changes, old settings --- .../harvest/server/web/servlet/OAIServlet.java | 15 ++++++++++----- .../iq/dataverse/settings/JvmSettings.java | 5 +---- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 19901cae796..96a19acc0e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -73,13 +73,18 @@ public class OAIServlet extends HttpServlet { @EJB SystemConfig systemConfig; + + @Inject + @ConfigProperty(name = "dataverse.oai.server.maxidentifiers", defaultValue="100") + private Integer maxListIdentifiers; - //Todo - revert this change - added to get past some local compile issues - private Integer maxListIdentifiers=100; - - private Integer maxListSets=100; + @Inject + @ConfigProperty(name = "dataverse.oai.server.maxsets", defaultValue="100") + private Integer maxListSets; - private Integer maxListRecords=10; + @Inject + @ConfigProperty(name = "dataverse.oai.server.maxrecords", defaultValue="10") + private Integer maxListRecords; private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.server.web.servlet.OAIServlet"); // If we are going to stick with this solution - of providing a minimalist diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 96a56d09c0b..fb85ae9adab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -50,10 +50,7 @@ public enum JvmSettings { UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"), - GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-rules-cache-maxage"), - FILES(SCOPE_FILES), - BASE_URL(FILES, "base-url"), - GLOBUS_TOKEN(FILES, "globus-token"), + GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-cache-maxage"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From 3babc5aac25710dcc92a90ae861a7b21eef43742 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 4 Dec 2023 20:35:56 -0500 Subject: [PATCH 
0380/1112] moving the StorageUse member to DvObjectContainer from DvObject; moving the em.merge()/em.persist() to the djb. #8549 --- .../java/edu/harvard/iq/dataverse/DataFile.java | 17 ----------------- .../iq/dataverse/DataverseServiceBean.java | 17 ++++++++++++++++- .../java/edu/harvard/iq/dataverse/DvObject.java | 14 -------------- .../harvard/iq/dataverse/DvObjectContainer.java | 14 ++++++++++++-- .../command/impl/SetCollectionQuotaCommand.java | 15 +-------------- .../storageuse/StorageUseServiceBean.java | 1 - 6 files changed, 29 insertions(+), 49 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 2770118d41b..3d8086b142b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -641,23 +641,6 @@ public String getFriendlySize() { } } - /** - * Experimental - record the pre-calculated "storage size" of the file, and - * all its associated auxiliary file objects: - - @Column(nullable = true) - private Long storageSize; - - - public Long getStorageSize() { - return storageSize; - } - - public void setStorageSize(Long storageSize) { - this.storageSize = storageSize; - } - * */ - public boolean isRestricted() { return restricted; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 549b8310122..487215c7a65 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.SolrSearchResult; +import edu.harvard.iq.dataverse.storageuse.StorageQuota; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; @@ -919,5 +920,19 @@ public List getDatasetTitlesWithinDataverse(Long dataverseId) { return em.createNativeQuery(cqString).getResultList(); } - + public void saveStorageQuota(Dataverse target, Long allocation) { + StorageQuota storageQuota = target.getStorageQuota(); + + if (storageQuota != null) { + storageQuota.setAllocation(allocation); + em.merge(storageQuota); + } else { + storageQuota = new StorageQuota(); + storageQuota.setDefinitionPoint(target); + storageQuota.setAllocation(allocation); + target.setStorageQuota(storageQuota); + em.persist(storageQuota); + } + em.flush(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 515d9f9f153..df249e04663 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.storageuse.StorageQuota; -import edu.harvard.iq.dataverse.storageuse.StorageUse; import java.sql.Timestamp; import java.text.SimpleDateFormat; @@ -182,10 +181,6 @@ public void setAlternativePersistentIndentifiers(Set roleAssignments; - /** - * Should only be used in constructors for DvObjectContainers (Datasets and - * Collections), to make sure new entries are created and persisted in the - * database StorageUse table for every DvObject container we create. 
- * @param storageUse - */ - public void setStorageUse(StorageUse storageUse) { - this.storageUse = storageUse; - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index 2f391e394fa..82057315fbb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -2,11 +2,9 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.settings.JvmSettings; -import edu.harvard.iq.dataverse.storageuse.StorageQuota; import edu.harvard.iq.dataverse.storageuse.StorageUse; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.persistence.CascadeType; -import java.util.Locale; import java.util.Optional; import jakarta.persistence.MappedSuperclass; @@ -45,6 +43,9 @@ public boolean isEffectivelyPermissionRoot() { private Boolean guestbookAtRequest = null; + @OneToOne(mappedBy = "dvObjectContainer",cascade={ CascadeType.REMOVE, CascadeType.PERSIST}, orphanRemoval=true) + private StorageUse storageUse; + public String getEffectiveStorageDriverId() { String id = storageDriver; if (StringUtils.isBlank(id)) { @@ -165,4 +166,13 @@ public void setCurationLabelSetName(String setName) { this.externalLabelSetName = setName; } + /** + * Should only be used in constructors for DvObjectContainers (Datasets and + * Collections), to make sure new entries are created and persisted in the + * database StorageUse table for every DvObject container we create. + * @param storageUse + */ + public void setStorageUse(StorageUse storageUse) { + this.storageUse = storageUse; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java index cf8fb6fd42e..e52c47a5e7d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java @@ -9,7 +9,6 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; -import edu.harvard.iq.dataverse.storageuse.StorageQuota; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.logging.Logger; @@ -49,18 +48,6 @@ public void executeImpl(CommandContext ctxt) throws CommandException { throw new IllegalCommandException("Must specify valid allocation in bytes", this); } - StorageQuota storageQuota = dataverse.getStorageQuota(); - - if (storageQuota != null) { - storageQuota.setAllocation(allocation); - ctxt.em().merge(storageQuota); - } else { - storageQuota = new StorageQuota(); - storageQuota.setDefinitionPoint(dataverse); - storageQuota.setAllocation(allocation); - dataverse.setStorageQuota(storageQuota); - ctxt.em().persist(storageQuota); - } - ctxt.em().flush(); + ctxt.dataverses().saveStorageQuota(dataverse, allocation); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java index 18e4ef49640..fbaaff22dee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java @@ -1,6 +1,5 @@ package 
edu.harvard.iq.dataverse.storageuse; -import edu.harvard.iq.dataverse.DvObjectContainer; import edu.harvard.iq.dataverse.settings.JvmSettings; import jakarta.ejb.Stateless; import jakarta.ejb.TransactionAttribute; From dfa2dc3853254bc8c58bedbfd288a63bcfa07b32 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 04:38:46 -0500 Subject: [PATCH 0381/1112] remove adaptation for quotas PR that was itself changed --- .../impl/CreateNewDataFilesCommand.java | 24 ++----------------- 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 269ba47643b..0470f59b861 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -3,20 +3,18 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -85,7 +83,7 @@ public class CreateNewDataFilesCommand extends AbstractCommand sio; - try { - sio = DataAccess.getDirectStorageIO(DataAccess.getLocationFromStorageId(newStorageIdentifier, version.getDataset())); - - // get file size - // Note - some stores (e.g. AWS S3) only offer eventual consistency and a call - // to get the size immediately after uploading may fail. As of the addition of - // PR#9409 adding storage quotas, we are now requiring size to be available - // earlier. If this is seen, adding - // a delay/retry may help - newFileSize = sio.retrieveSizeFromMedia(); - } catch (IOException e) { - // If we don't get a file size, a CommandExecutionException will be thrown later in the code - e.printStackTrace(); - } - } } // Finally, if none of the special cases above were applicable (or // if we were unable to unpack an uploaded file, etc.), we'll just From c78613e60ca7a2442753d6382b0ace3c7fd07316 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 5 Dec 2023 08:42:23 -0500 Subject: [PATCH 0382/1112] one more refinement for the flyway script. 
#8549 --- .../storageuse/StorageUseServiceBean.java | 33 ++++++++++--------- .../V6.0.0.5__8549-collection-quotas.sql | 13 ++++++++ 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java index fbaaff22dee..7aea7a7b596 100644 --- a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java @@ -46,23 +46,24 @@ public Long findStorageSizeByDvContainerId(Long dvObjectId) { */ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void incrementStorageSizeRecursively(Long dvObjectContainerId, Long increment) { - //@todo should throw exceptions if either parameter is null - Optional allow = JvmSettings.STORAGEUSE_DISABLE_UPDATES.lookupOptional(Boolean.class); - if (!(allow.isPresent() && allow.get())) { - String queryString = "WITH RECURSIVE uptree (id, owner_id) AS\n" - + "(" - + " SELECT id, owner_id\n" - + " FROM dvobject\n" - + " WHERE id=" + dvObjectContainerId + "\n" - + " UNION ALL\n" - + " SELECT dvobject.id, dvobject.owner_id\n" - + " FROM dvobject\n" - + " JOIN uptree ON dvobject.id = uptree.owner_id)\n" - + "UPDATE storageuse SET sizeinbytes=COALESCE(sizeinbytes,0)+" + increment + "\n" - + "FROM uptree\n" - + "WHERE dvobjectcontainer_id = uptree.id;"; + if (dvObjectContainerId != null && increment != null) { + Optional allow = JvmSettings.STORAGEUSE_DISABLE_UPDATES.lookupOptional(Boolean.class); + if (!(allow.isPresent() && allow.get())) { + String queryString = "WITH RECURSIVE uptree (id, owner_id) AS\n" + + "(" + + " SELECT id, owner_id\n" + + " FROM dvobject\n" + + " WHERE id=" + dvObjectContainerId + "\n" + + " UNION ALL\n" + + " SELECT dvobject.id, dvobject.owner_id\n" + + " FROM dvobject\n" + + " JOIN uptree ON dvobject.id = uptree.owner_id)\n" + + "UPDATE storageuse SET sizeinbytes=COALESCE(sizeinbytes,0)+" + increment + "\n" + + "FROM uptree\n" + + "WHERE dvobjectcontainer_id = uptree.id;"; - int parentsUpdated = em.createNativeQuery(queryString).executeUpdate(); + int parentsUpdated = em.createNativeQuery(queryString).executeUpdate(); + } } // @todo throw an exception if the number of parent dvobjects updated by // the query is < 2 - ? diff --git a/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql b/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql index 3657642c267..d6c067056ec 100644 --- a/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql +++ b/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql @@ -38,6 +38,19 @@ AND fileobject.id = file.id AND dt.datafile_id = file.id GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null; +-- there may also be some auxiliary files registered in the database, such as +-- the content generated and deposited by external tools - diff. privacy stats +-- being one of the example. These are also considered the "payload" files that +-- we want to count for the purposes of calculating storage use. 
+UPDATE dvobject SET tempStorageSize=tempStorageSize+o.combinedStorageSize +FROM (SELECT datasetobject.id, COALESCE(SUM(aux.fileSize),0) AS combinedStorageSize +FROM dvobject fileobject, dvobject datasetobject, datafile file, auxiliaryFile aux +WHERE fileobject.owner_id = datasetobject.id +AND fileobject.id = file.id +AND aux.datafile_id = file.id +GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null; + + -- ... and then we can repeat the same for collections, by setting the storage size -- to the sum of the storage sizes of the datasets *directly* in each collection: -- (no attemp is made yet to recursively count the sizes all the chilld sub-collections) From 0c02b15aab711acbfb7f2c957c4482313b3997b9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 5 Dec 2023 09:50:33 -0500 Subject: [PATCH 0383/1112] try QDR /logo endpoint --- .../edu/harvard/iq/dataverse/api/Datasets.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index af6059cf882..828ba218cc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1971,6 +1971,22 @@ public Response getDatasetThumbnail(@PathParam("id") String idSupplied) { } } + @GET + @Produces({ "image/png" }) + @Path("{id}/logo") + public Response getDatasetLogo(@PathParam("id") String idSupplied) { + try { + Dataset dataset = findDatasetOrDie(idSupplied); + InputStream is = DatasetUtil.getLogoAsInputStream(dataset); + if (is == null) { + return notFound("Logo not available"); + } + return Response.ok(is).build(); + } catch (WrappedResponse wr) { + return notFound("Logo not available"); + } + } + // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers. 
@POST @AuthRequired From 8c9f1242d53aea5ecc906bd4a2a3f5d12a884224 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Dec 2023 10:13:53 -0500 Subject: [PATCH 0384/1112] switch minio to creds jenkins expects #6783 --- docker-compose-dev.yml | 9 ++++----- .../java/edu/harvard/iq/dataverse/api/S3AccessIT.java | 4 ++-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 6bc50f7e764..98376e255dd 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -41,8 +41,8 @@ services: -Ddataverse.files.minio1.path-style-access=true -Ddataverse.files.minio1.upload-redirect=false -Ddataverse.files.minio1.download-redirect=false - -Ddataverse.files.minio1.access-key=minioadmin - -Ddataverse.files.minio1.secret-key=minioadmin + -Ddataverse.files.minio1.access-key=4cc355_k3y + -Ddataverse.files.minio1.secret-key=s3cr3t_4cc355_k35 ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -211,9 +211,8 @@ services: volumes: - minio_storage:/data environment: - # these are the defaults but are here for clarity - MINIO_ROOT_USER: minioadmin - MINIO_ROOT_PASSWORD: minioadmin + MINIO_ROOT_USER: 4cc355_k3y + MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k35 command: server /data networks: diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index f5e4ce6a794..daf04bb3d14 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -54,8 +54,8 @@ public static void setUp() { .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyLocalStack, secretKeyLocalStack))) .withEndpointConfiguration(new EndpointConfiguration("s3.localhost.localstack.cloud:4566", Regions.US_EAST_2.getName())).build(); - String accessKeyMinio = "minioadmin"; - String secretKeyMinio = "minioadmin"; + String accessKeyMinio = "4cc355_k3y"; + String secretKeyMinio = "s3cr3t_4cc355_k35"; s3minio = AmazonS3ClientBuilder.standard() // https://stackoverflow.com/questions/72205086/amazonss3client-throws-unknownhostexception-if-attempting-to-connect-to-a-local .withPathStyleAccessEnabled(Boolean.TRUE) From 6a7d8d1c6f76c8e54f9759f643204aa339c5bdd0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Dec 2023 10:33:19 -0500 Subject: [PATCH 0385/1112] make assertions about users #6783 --- .../java/edu/harvard/iq/dataverse/api/S3AccessIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index daf04bb3d14..7c1531cbfaf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -99,9 +99,10 @@ public void testNonDirectUpload() { String driverLabel = "MinIO"; Response createSuperuser = UtilIT.createRandomUser(); + createSuperuser.then().assertThat().statusCode(200); String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); String superusername = UtilIT.getUsernameFromResponse(createSuperuser); - UtilIT.makeSuperUser(superusername); + UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200); Response storageDrivers = listStorageDrivers(superuserApiToken); storageDrivers.prettyPrint(); // TODO where is "Local/local" coming from? 
@@ -118,6 +119,7 @@ public void testNonDirectUpload() { //create user who will make a dataverse/dataset Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(200); String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -208,9 +210,10 @@ public void testDirectUpload() { String driverId = "localstack1"; String driverLabel = "LocalStack"; Response createSuperuser = UtilIT.createRandomUser(); + createSuperuser.then().assertThat().statusCode(200); String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); String superusername = UtilIT.getUsernameFromResponse(createSuperuser); - UtilIT.makeSuperUser(superusername); + UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200); Response storageDrivers = listStorageDrivers(superuserApiToken); storageDrivers.prettyPrint(); // TODO where is "Local/local" coming from? @@ -227,6 +230,7 @@ public void testDirectUpload() { //create user who will make a dataverse/dataset Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(200); String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); From b9f48913e498ec96ef8f5994c21e7bb549e747e0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Dec 2023 10:41:45 -0500 Subject: [PATCH 0386/1112] move methods to UtilIT #6783 --- .../harvard/iq/dataverse/api/S3AccessIT.java | 75 +++---------------- .../edu/harvard/iq/dataverse/api/UtilIT.java | 50 +++++++++++++ 2 files changed, 62 insertions(+), 63 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index 7c1531cbfaf..1306c30d9c1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -103,7 +103,7 @@ public void testNonDirectUpload() { String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); String superusername = UtilIT.getUsernameFromResponse(createSuperuser); UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200); - Response storageDrivers = listStorageDrivers(superuserApiToken); + Response storageDrivers = UtilIT.listStorageDrivers(superuserApiToken); storageDrivers.prettyPrint(); // TODO where is "Local/local" coming from? 
String drivers = """ @@ -127,18 +127,18 @@ public void testNonDirectUpload() { createDataverseResponse.prettyPrint(); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Response originalStorageDriver = getStorageDriver(dataverseAlias, superuserApiToken); + Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() .body("data.message", equalTo("undefined")) .statusCode(200); - Response setStorageDriverToS3 = setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); + Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); setStorageDriverToS3.prettyPrint(); setStorageDriverToS3.then().assertThat() .statusCode(200); - Response updatedStorageDriver = getStorageDriver(dataverseAlias, superuserApiToken); + Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); updatedStorageDriver.prettyPrint(); updatedStorageDriver.then().assertThat() .statusCode(200); @@ -214,7 +214,7 @@ public void testDirectUpload() { String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); String superusername = UtilIT.getUsernameFromResponse(createSuperuser); UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200); - Response storageDrivers = listStorageDrivers(superuserApiToken); + Response storageDrivers = UtilIT.listStorageDrivers(superuserApiToken); storageDrivers.prettyPrint(); // TODO where is "Local/local" coming from? String drivers = """ @@ -238,18 +238,18 @@ public void testDirectUpload() { createDataverseResponse.prettyPrint(); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Response originalStorageDriver = getStorageDriver(dataverseAlias, superuserApiToken); + Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() .body("data.message", equalTo("undefined")) .statusCode(200); - Response setStorageDriverToS3 = setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); + Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); setStorageDriverToS3.prettyPrint(); setStorageDriverToS3.then().assertThat() .statusCode(200); - Response updatedStorageDriver = getStorageDriver(dataverseAlias, superuserApiToken); + Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); updatedStorageDriver.prettyPrint(); updatedStorageDriver.then().assertThat() .statusCode(200); @@ -275,7 +275,7 @@ public void testDirectUpload() { // // String fileId = JsonPath.from(addFileResponse.body().asString()).getString("data.files[0].dataFile.id"); long size = 1000000000l; - Response getUploadUrls = getUploadUrls(datasetPid, size, apiToken); + Response getUploadUrls = UtilIT.getUploadUrls(datasetPid, size, apiToken); getUploadUrls.prettyPrint(); getUploadUrls.then().assertThat().statusCode(200); @@ -298,7 +298,7 @@ public void testDirectUpload() { String contentsOfFile = "foobar"; InputStream inputStream = new ByteArrayInputStream(contentsOfFile.getBytes(StandardCharsets.UTF_8)); - Response uploadFileDirect = uploadFileDirect(localhostUrl, inputStream); + Response uploadFileDirect = UtilIT.uploadFileDirect(localhostUrl, inputStream); uploadFileDirect.prettyPrint(); /* Direct upload to MinIO is failing with errors like this: @@ -357,7 +357,7 @@ 
public void testDirectUpload() { assertEquals(contentsOfFile, s3Object); System.out.println("direct download..."); - Response getHeaders = downloadFileNoRedirect(Integer.valueOf(fileId), apiToken); + Response getHeaders = UtilIT.downloadFileNoRedirect(Integer.valueOf(fileId), apiToken); for (Header header : getHeaders.getHeaders()) { System.out.println("direct download header: " + header); } @@ -371,7 +371,7 @@ public void testDirectUpload() { } catch (UnsupportedEncodingException ex) { } - Response downloadFile = downloadFromUrl(decodedDownloadUrl); + Response downloadFile = UtilIT.downloadFromUrl(decodedDownloadUrl); downloadFile.prettyPrint(); downloadFile.then().assertThat().statusCode(200); @@ -394,55 +394,4 @@ public void testDirectUpload() { } - //TODO: move these into UtilIT. They are here for now to avoid merge conflicts - static Response listStorageDrivers(String apiToken) { - return given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/admin/dataverse/storageDrivers"); - } - - static Response getStorageDriver(String dvAlias, String apiToken) { - return given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/admin/dataverse/" + dvAlias + "/storageDriver"); - } - - static Response setStorageDriver(String dvAlias, String label, String apiToken) { - return given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken) - .body(label) - .put("/api/admin/dataverse/" + dvAlias + "/storageDriver"); - } - - static Response getUploadUrls(String idOrPersistentIdOfDataset, long sizeInBytes, String apiToken) { - String idInPath = idOrPersistentIdOfDataset; // Assume it's a number. - String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. - if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) { - idInPath = ":persistentId"; - optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset; - } - RequestSpecification requestSpecification = given(); - if (apiToken != null) { - requestSpecification = given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); - } - return requestSpecification.get("/api/datasets/" + idInPath + "/uploadurls?size=" + sizeInBytes + optionalQueryParam); - } - - static Response uploadFileDirect(String url, InputStream inputStream) { - return given() - .header("x-amz-tagging", "dv-state=temp") - .body(inputStream) - .put(url); - } - - static Response downloadFileNoRedirect(Integer fileId, String apiToken) { - return given().when().redirects().follow(false) - .get("/api/access/datafile/" + fileId + "?key=" + apiToken); - } - - static Response downloadFromUrl(String url) { - return given().get(url); - } - } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9b264086c27..12bb069424f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2361,6 +2361,56 @@ static Response deleteStorageSite(long storageSiteId) { .delete("/api/admin/storageSites/" + storageSiteId); } + static Response listStorageDrivers(String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/dataverse/storageDrivers"); + } + + static Response getStorageDriver(String dvAlias, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/dataverse/" + dvAlias + "/storageDriver"); + } + + static Response setStorageDriver(String dvAlias, String label, String apiToken) { + return given() + 
.header(API_TOKEN_HTTP_HEADER, apiToken) + .body(label) + .put("/api/admin/dataverse/" + dvAlias + "/storageDriver"); + } + + static Response getUploadUrls(String idOrPersistentIdOfDataset, long sizeInBytes, String apiToken) { + String idInPath = idOrPersistentIdOfDataset; // Assume it's a number. + String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. + if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) { + idInPath = ":persistentId"; + optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset; + } + RequestSpecification requestSpecification = given(); + if (apiToken != null) { + requestSpecification = given() + .header(API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/datasets/" + idInPath + "/uploadurls?size=" + sizeInBytes + optionalQueryParam); + } + + static Response uploadFileDirect(String url, InputStream inputStream) { + return given() + .header("x-amz-tagging", "dv-state=temp") + .body(inputStream) + .put(url); + } + + static Response downloadFileNoRedirect(Integer fileId, String apiToken) { + return given().when().redirects().follow(false) + .get("/api/access/datafile/" + fileId + "?key=" + apiToken); + } + + static Response downloadFromUrl(String url) { + return given().get(url); + } + static Response metricsDataversesToMonth(String yyyymm, String queryParams) { String optionalYyyyMm = ""; if (yyyymm != null) { From 7349ed9f754e05ff7b16a24ea8f3c24c060ed593 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 10:43:38 -0500 Subject: [PATCH 0387/1112] get logo, picking 48px size for datafile thumbs FWIW: QDR generates a 400px version here and then uses styling to fit the page. Not sure what the motivation for that was without digging. --- .../iq/dataverse/dataset/DatasetUtil.java | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 096f1f87acc..ccf861ebdc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -411,6 +411,69 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) { return nonDefaultDatasetThumbnail; } } + + public static InputStream getLogoAsInputStream(Dataset dataset) { + if (dataset == null) { + return null; + } + StorageIO dataAccess = null; + + try { + dataAccess = DataAccess.getStorageIO(dataset); + } catch (IOException ioex) { + logger.warning("getLogo(): Failed to initialize dataset StorageIO for " + dataset.getStorageIdentifier() + + " (" + ioex.getMessage() + ")"); + } + + InputStream in = null; + try { + if (dataAccess == null) { + logger.warning( + "getLogo(): Failed to initialize dataset StorageIO for " + dataset.getStorageIdentifier()); + } else { + in = dataAccess.getAuxFileAsInputStream(datasetLogoFilenameFinal); + } + } catch (IOException ex) { + logger.fine( + "Dataset-level thumbnail file does not exist, or failed to open; will try to find an image file that can be used as the thumbnail."); + } + + if (in == null) { + DataFile thumbnailFile = dataset.getThumbnailFile(); + + if (thumbnailFile == null) { + if (dataset.isUseGenericThumbnail()) { + logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo and is 'Use Generic'."); + return null; + } else { + thumbnailFile = attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null); + if (thumbnailFile == null) { + 
logger.fine("Dataset (id :" + dataset.getId() + + ") does not have a logo available that could be selected automatically."); + return null; + } else { + + } + } + } + if (thumbnailFile.isRestricted()) { + logger.fine("Dataset (id :" + dataset.getId() + + ") has a logo the user selected but the file must have later been restricted. Returning null."); + return null; + } + + try { + in = ImageThumbConverter.getImageThumbnailAsInputStream(thumbnailFile.getStorageIO(), + ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE).getInputStream(); + } catch (IOException ioex) { + logger.warning("getLogo(): Failed to get logo from DataFile for " + dataset.getStorageIdentifier() + + " (" + ioex.getMessage() + ")"); + ioex.printStackTrace(); + } + + } + return in; + } /** * The dataset logo is the file that a user uploads which is *not* one of From 6f1cd087624fea70a1c37425aacaf05c9d7ba0bf Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 5 Dec 2023 15:53:21 +0000 Subject: [PATCH 0388/1112] Added: checks before calling getFileMetadatas on canDownloadAtLeastOneFile method in PermissionServiceBean --- .../iq/dataverse/PermissionServiceBean.java | 51 ++++++++++++++++++- 1 file changed, 49 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 9e6628617ce..2e4627576c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -41,6 +41,9 @@ import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; import jakarta.persistence.Query; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Root; /** * Your one-stop-shop for deciding which user can do what action on which @@ -837,12 +840,56 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio return false; } - public boolean canDownloadAtLeastOneFile(User requestUser, DatasetVersion datasetVersion) { + /** + * Checks if a User can download at least one file of the target DatasetVersion. + * + * @param user User to check + * @param datasetVersion DatasetVersion to check + * @return boolean indicating whether the user can download at least one file or not + */ + public boolean canDownloadAtLeastOneFile(User user, DatasetVersion datasetVersion) { + if (user.isSuperuser()) { + return true; + } + if (hasReleasedFiles(datasetVersion)) { + return true; + } for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) { - if (userOn(requestUser, fileMetadata.getDataFile()).has(Permission.DownloadFile)) { + if (userOn(user, fileMetadata.getDataFile()).has(Permission.DownloadFile)) { return true; } } return false; } + + /** + * Checks if a DatasetVersion has released files. + * + * This method is mostly based on {@link #isPublicallyDownloadable(DvObject)} although in this case, instead of basing + * the search on a particular file, it searches for the total number of files in the target version that are present + * in the released version. 
+ * + * @param targetDatasetVersion DatasetVersion to check + * @return boolean indicating whether the dataset version has released files or not + */ + private boolean hasReleasedFiles(DatasetVersion targetDatasetVersion) { + Dataset targetDataset = targetDatasetVersion.getDataset(); + if (!targetDataset.isReleased()) { + return false; + } + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root datasetVersionRoot = criteriaQuery.from(DatasetVersion.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + criteriaBuilder.equal(fileMetadataRoot.get("dataFile").get("restricted"), false), + criteriaBuilder.equal(datasetVersionRoot.get("dataset"), targetDataset), + criteriaBuilder.equal(datasetVersionRoot.get("versionState"), DatasetVersion.VersionState.RELEASED), + fileMetadataRoot.in(targetDatasetVersion.getFileMetadatas()), + fileMetadataRoot.in(datasetVersionRoot.get("fileMetadatas")))); + Long result = em.createQuery(criteriaQuery).getSingleResult(); + return result > 0; + } } From c194d74b2029917de050fe5d40b237b23bddf3ab Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 5 Dec 2023 10:59:46 -0500 Subject: [PATCH 0389/1112] Clarified the sentence about the initial deployment in the release note. #8549 --- doc/release-notes/8549-collection-quotas.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/8549-collection-quotas.md b/doc/release-notes/8549-collection-quotas.md index 29b84213cfb..b3635d0c5a1 100644 --- a/doc/release-notes/8549-collection-quotas.md +++ b/doc/release-notes/8549-collection-quotas.md @@ -1,3 +1,3 @@ This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. -Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the deployment. +Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. 
On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the initial deployment of 6.1. From cf7e664e626994419ca3a1c80785290da7efe683 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 5 Dec 2023 12:02:41 -0500 Subject: [PATCH 0390/1112] moved the entitymanager calls from a command to the service #8549 --- .../edu/harvard/iq/dataverse/DataverseServiceBean.java | 8 ++++++++ .../engine/command/impl/DeleteCollectionQuotaCommand.java | 4 +--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 487215c7a65..b6e666e8058 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -935,4 +935,12 @@ public void saveStorageQuota(Dataverse target, Long allocation) { } em.flush(); } + + public void disableStorageQuota(StorageQuota storageQuota) { + if (storageQuota != null && storageQuota.getAllocation() != null) { + storageQuota.setAllocation(null); + em.merge(storageQuota); + em.flush(); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java index 4015228366b..c0f863686da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java @@ -46,9 +46,7 @@ public void executeImpl(CommandContext ctxt) throws CommandException { StorageQuota storageQuota = targetDataverse.getStorageQuota(); if (storageQuota != null && storageQuota.getAllocation() != null) { - storageQuota.setAllocation(null); - ctxt.em().merge(storageQuota); - ctxt.em().flush(); + ctxt.dataverses().disableStorageQuota(storageQuota); } // ... and if no quota was enabled on the collection - nothing to do = success } From 6a4a9ab3d625f1e5835b3e119449f8fd88eaee23 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Dec 2023 12:10:48 -0500 Subject: [PATCH 0391/1112] stub out diagnosing jenkins failures #10101 --- doc/sphinx-guides/source/qa/jenkins.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/doc/sphinx-guides/source/qa/jenkins.md b/doc/sphinx-guides/source/qa/jenkins.md index a4ca4d8688f..9259284beb9 100644 --- a/doc/sphinx-guides/source/qa/jenkins.md +++ b/doc/sphinx-guides/source/qa/jenkins.md @@ -42,3 +42,18 @@ How can you know if API tests are passing? Here are the steps, by way of example - Click "Test Result". - Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. - Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. + +## Diagnosing Failures + +API test failures can have multiple causes. As described above, from the "Test Result" page, you might see the failure under "All Failed Tests". However, the test could have failed because of some underlying system issue. + +If you have determined that the API tests have not run at all, your next step should be to click on "Console Output". For example, . Click "Full log" to see the full log in the browser or navigate to (for example) to get a plain text version.
+ +Go to the end of the log and then scroll up, looking for the failure. A failed Ansible task can look like this: + +``` +TASK [dataverse : download payara zip] ***************************************** +fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "elapsed": 10, "msg": "Request failed: ", "url": "https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip"} +``` + +In the example above, if Payara can't be downloaded, we're obviously going to have problems deploying Dataverse to it! From dfa49c3720f866f36df0b6cd712f1c5144dfee44 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 14:31:32 -0500 Subject: [PATCH 0392/1112] rename flyway script --- ...thumb-failures.sql => V6.0.0.6__9506-track-thumb-failures.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.5__9506-track-thumb-failures.sql => V6.0.0.6__9506-track-thumb-failures.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V6.0.0.6__9506-track-thumb-failures.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.5__9506-track-thumb-failures.sql rename to src/main/resources/db/migration/V6.0.0.6__9506-track-thumb-failures.sql From 70a3442cc9a6c672ef8a553be8b279b3b8ea1b52 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 5 Dec 2023 14:36:21 -0500 Subject: [PATCH 0393/1112] updated aux. file service bean #8549 --- .../dataverse/AuxiliaryFileServiceBean.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java index 8c96f98ce39..363622ba3bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java @@ -2,6 +2,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -46,6 +47,8 @@ public class AuxiliaryFileServiceBean implements java.io.Serializable { @EJB private SystemConfig systemConfig; + @EJB + StorageUseServiceBean storageUseService; public AuxiliaryFile find(Object pk) { return em.find(AuxiliaryFile.class, pk); @@ -126,6 +129,13 @@ public AuxiliaryFile processAuxiliaryFile(InputStream fileInputStream, DataFile } dataFile.getAuxiliaryFiles().add(auxFile); } + // We've just added this file to storage; increment the StorageUse + // record if needed. 
+                if (auxFile.getFileSize() != null
+                        && auxFile.getFileSize() > 0
+                        && dataFile.getOwner() != null ) {
+                    storageUseService.incrementStorageSizeRecursively(dataFile.getOwner().getId(), auxFile.getFileSize());
+                }
         } catch (IOException ioex) {
             logger.severe("IO Exception trying to save auxiliary file: " + ioex.getMessage());
             throw new InternalServerErrorException();
@@ -181,6 +191,7 @@ public void deleteAuxiliaryFile(DataFile dataFile, String formatTag, String form
         if (af == null) {
             throw new FileNotFoundException();
         }
+        Long auxFileSize = af.getFileSize();
         em.remove(af);
         StorageIO storageIO;
         storageIO = dataFile.getStorageIO();
@@ -188,6 +199,14 @@ public void deleteAuxiliaryFile(DataFile dataFile, String formatTag, String form
             if (storageIO.isAuxObjectCached(auxExtension)) {
                 storageIO.deleteAuxObject(auxExtension);
             }
+            // We've just deleted this file from storage; update the StorageUse
+            // record if needed.
+            if (auxFileSize != null
+                    && auxFileSize > 0
+                    && dataFile.getOwner() != null) {
+                storageUseService.incrementStorageSizeRecursively(dataFile.getOwner().getId(), (0L - auxFileSize));
+            }
+
     }
 
     public List findAuxiliaryFiles(DataFile dataFile) {

From c54a85fca9377b74efc0e74e8a70a6de2f6fccc4 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Tue, 5 Dec 2023 14:52:23 -0500
Subject: [PATCH 0394/1112] #9464 add caveats to release note.

---
 doc/release-notes/9464-json-validation.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/9464-json-validation.md b/doc/release-notes/9464-json-validation.md
index 4b08f2ca9dd..f104263ba35 100644
--- a/doc/release-notes/9464-json-validation.md
+++ b/doc/release-notes/9464-json-validation.md
@@ -1,3 +1,3 @@
-Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. (Issue #9464 and #9465)
+Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release, functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc.
(Issue #9464 and #9465) For documentation see the API changelog: http://preview.guides.gdcc.io/en/develop/api/changelog.html From 2379828c2737260901b23020a436f5cab6cc962a Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 5 Dec 2023 15:05:12 -0500 Subject: [PATCH 0395/1112] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2d37c3b07ae..29aa7c880ac 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -510,7 +510,9 @@ The fully expanded example above (without environment variables) looks like this Retrieve a Dataset JSON Schema for a Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Retrieves a JSON schema customized for a given collection in order to validate a dataset JSON file prior to creating the dataset: +Retrieves a JSON schema customized for a given collection in order to validate a dataset JSON file prior to creating the dataset. This +first version of the schema only includes required elements and fields. In the future we plan to improve the schema by adding controlled +vocabulary and more robust dataset field format testing: .. code-block:: bash @@ -535,7 +537,8 @@ While it is recommended to download a copy of the JSON Schema from the collectio Validate Dataset JSON File for a Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Validates a dataset JSON file customized for a given collection prior to creating the dataset: +Validates a dataset JSON file customized for a given collection prior to creating the dataset. The validation only tests for json formatting +and the presence of required elements: .. code-block:: bash From dd2d9726e3125975493fa6dbf70578d76fa5f07c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 16:47:04 -0500 Subject: [PATCH 0396/1112] globus store options --- .../source/installation/config.rst | 50 +++++++++++++++++-- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 7b32da8f6c3..e0e4d4cd89e 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -499,14 +499,14 @@ Logging & Slow Performance .. _file-storage: -File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores ------------------------------------------------------------------------------------------------------ +File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores and/or Globus Stores +-------------------------------------------------------------------------------------------------------------------------- By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara6/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\.directory`` JVM option described below. -A Dataverse installation can alternately store files in a Swift or S3-compatible object store, and can now be configured to support multiple stores at once. 
With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis. +A Dataverse installation can alternately store files in a Swift or S3-compatible object store, or on a Globus endpoint, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis. -A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. +A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web or Globus accessible trusted remote store. A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. By default, Dataverse supports uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). @@ -958,7 +958,7 @@ Once you have configured a trusted remote store, you can point your users to the dataverse.files..type ``remote`` **Required** to mark this storage as remote. (none) dataverse.files..label **Required** label to be shown in the UI for this storage. (none) dataverse.files..base-url **Required** All files must have URLs of the form /* . (none) - dataverse.files..base-store **Optional** The id of a base store (of type file, s3, or swift). (the default store) + dataverse.files..base-store **Required** The id of a base store (of type file, s3, or swift). (the default store) dataverse.files..download-redirect ``true``/``false`` Enable direct download (should usually be true). ``false`` dataverse.files..secret-key A key used to sign download requests sent to the remote store. Optional. (none) dataverse.files..url-expiration-minutes If direct downloads and using signing: time until links expire. Optional. 60 @@ -967,6 +967,46 @@ Once you have configured a trusted remote store, you can point your users to the =========================================== ================== ========================================================================== =================== +.. _globus-storage: + +Globus Storage +++++++++++++++ + +Globus stores allow Dataverse to manage files stored in Globus endpoints or to reference files in remote Globus endpoints, with users leveraging Globus to transfer files to/from Dataverse (rather than using HTTP/HTTPS). +See :doc:`/developers/big-data-support` for additional information on how to use a globus store. Consult the `Globus documentation `_ for information about using Globus and configuring Globus endpoints. + +In addition to having the type "globus" and requiring a label, Globus Stores share many options with Trusted Remote Stores and options to specify and access a Globus endpoint(s). As with Remote Stores, Globus Stores also use a baseStore - a file, s3, or swift store that can be used to store additional ancillary dataset files (e.g. metadata exports, thumbnails, auxiliary files, etc.). +These and other available options are described in the table below. 
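+
+For orientation, a minimal sketch of what a *managed* Globus store configuration might look like is shown below. The store id ``globusdemo`` and all values are placeholders, and each option used here is defined in the table that follows:
+
+.. code-block:: bash
+
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.type=globus"
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.label=GlobusDemo"
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.managed=true"
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.base-store=file"
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.transfer-endpoint-with-basepath=<endpoint-id>/<base-path>"
+    ./asadmin create-jvm-options "-Ddataverse.files.globusdemo.globus-token=<base64-encoded-token>"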
+
+There are two types of Globus stores:
+
+- managed - where Dataverse manages the Globus endpoint, deciding where transferred files are stored and managing access control for users transferring files to/from Dataverse
+- remote - where Dataverse references files that remain on trusted remote Globus endpoints
+
+For managed stores, there are two variants, connecting to standard/file-based Globus endpoints and to endpoints using an underlying S3 store via the Globus S3 Connector.
+With the former, Dataverse has no direct access to the file contents, and functionality related to ingest, fixity hash validation, etc. is not available. With the latter, Dataverse can access files internally via S3 and the functionality supported is similar to that when using S3 direct upload.
+
+Once you have configured a Globus store, it is recommended that you install the `dataverse-globus app `_ to allow transfers in/out of Dataverse to be initiated via the Dataverse user interface. Alternatively, you can point your users to the :doc:`/developers/globus-api` for information about API support.
+
+.. table::
+    :align: left
+
+    ======================================================= ================== ========================================================================== ===================
+    JVM Option                                              Value              Description                                                                Default value
+    ======================================================= ================== ========================================================================== ===================
+    dataverse.files..type                                   ``globus``         **Required** to mark this storage as globus enabled.                      (none)
+    dataverse.files..label                                                     **Required** label to be shown in the UI for this storage.                (none)
+    dataverse.files..base-store                                                **Required** The id of a base store (of type file, s3, or swift).         (the default store)
+    dataverse.files..remote-store-name                                         A short name used in the UI to indicate where a file is located. Optional. (none)
+    dataverse.files..remote-store-url                                          A url to an info page about the remote store used in the UI. Optional.    (none)
+    dataverse.files..managed                                ``true``/``false`` Whether Dataverse manages an associated Globus endpoint                   ``false``
+    dataverse.files..transfer-endpoint-with-basepath                           The *managed* Globus endpoint id and associated base path for file storage (none)
+    dataverse.files..globus-token                                              A Globus token (base64 encoded : for a managed store) - using a microprofile alias is recommended (none)
+    dataverse.files..reference-endpoints-with-basepaths                        A comma separated list of *remote* trusted Globus endpoint id/s           (none)
+    dataverse.files..files-not-accessible-by-dataverse     ``true``/``false`` Should be true for S3 Connector-based *managed* stores                     ``false``
+
+    ======================================================= ================== ========================================================================== ===================
+
..
_temporary-file-storage: Temporary Upload File Storage From 4d7818a7be615033bd00261a6a0951c703c0ad3b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 16:59:04 -0500 Subject: [PATCH 0397/1112] merge miss --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 13ec049fa0a..8afc365417e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -87,7 +87,7 @@ /* Amazon AWS S3 driver */ -public class S3AccessIO extends StorageIO implements GlobusAccessibleStore { +public class S3AccessIO extends StorageIO { private static final Config config = ConfigProvider.getConfig(); private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.S3AccessIO"); @@ -1194,7 +1194,6 @@ private static AmazonS3 getClient(String driverId) { * * if a profile and static credentials are both explicitly set, the profile will be used preferentially, and * * if no store-specific credentials are set, the global credentials will be preferred over using any "default" profile credentials that are found. */ - String s3profile = getConfigParamForDriver(driverId, PROFILE,"default"); ArrayList providers = new ArrayList<>(); From ceacf7e92c045a61b96205536f442dc48142cb2a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Dec 2023 16:59:40 -0500 Subject: [PATCH 0398/1112] add a stub globus api page since it is referenced in the config doc --- .../source/developers/globus-api.rst | 282 ++++++++++++++++++ doc/sphinx-guides/source/developers/index.rst | 1 + 2 files changed, 283 insertions(+) create mode 100644 doc/sphinx-guides/source/developers/globus-api.rst diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst new file mode 100644 index 00000000000..2775ffd2142 --- /dev/null +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -0,0 +1,282 @@ +Globus Transfer API +=================== + +The Globus API addresses three use cases: +* Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector) +* Reference of files that will remain in a remote Globus endpoint +* Transfer from a Dataverse-managed Globus endpoint + +The ability for Dataverse to interact with Globus endpoints is configured via +Direct upload involves a series of three activities, each involving interacting with the server for a Dataverse installation: + +* Requesting initiation of a transfer from the server +* Use of the pre-signed URL(s) returned in that call to perform an upload/multipart-upload of the file to S3 +* A call to the server to register the file/files as part of the dataset/replace a file in the dataset or to cancel the transfer + +This API is only enabled when a Dataset is configured with a data store supporting direct S3 upload. +Administrators should be aware that partial transfers, where a client starts uploading the file/parts of the file and does not contact the server to complete/cancel the transfer, will result in data stored in S3 that is not referenced in the Dataverse installation (e.g. should be considered temporary and deleted.) 
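+
+One way an administrator might spot such leftover uploads is to inspect object tags, since the PUT example below tags uploads with ``dv-state=temp``. This is a sketch only; the bucket name and prefix are taken from the examples below and will differ per installation:
+
+.. code-block:: bash
+
+    # list objects under a dataset's storage prefix and print the tags of each one
+    aws s3api list-objects-v2 --bucket demo-dataverse-bucket --prefix 10.5072/FK2FOQPJS/ \
+      --query 'Contents[].Key' --output text | tr '\t' '\n' | while read -r key; do
+        echo "$key"
+        aws s3api get-object-tagging --bucket demo-dataverse-bucket --key "$key"
+    done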
+ + +Requesting Direct Upload of a DataFile +-------------------------------------- +To initiate a transfer of a file to S3, make a call to the Dataverse installation indicating the size of the file to upload. The response will include a pre-signed URL(s) that allow the client to transfer the file. Pre-signed URLs include a short-lived token authorizing the action represented by the URL. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV + export SIZE=1000000000 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/uploadurls?persistentId=$PERSISTENT_IDENTIFIER&size=$SIZE" + +The response to this call, assuming direct uploads are enabled, will be one of two forms: + +Single URL: when the file is smaller than the size at which uploads must be broken into multiple parts + +.. code-block:: bash + + { + "status":"OK", + "data":{ + "url":"...", + "partSize":1073741824, + "storageIdentifier":"s3://demo-dataverse-bucket:177883619b8-892ca9f7112e" + } + } + +Multiple URLs: when the file must be uploaded in multiple parts. The part size is set by the Dataverse installation and, for AWS-based storage, range from 5 MB to 5 GB + +.. code-block:: bash + + { + "status":"OK", + "data":{ + "urls":{ + "1":"...", + "2":"...", + "3":"...", + "4":"...", + "5":"..." + } + "abort":"/api/datasets/mpupload?...", + "complete":"/api/datasets/mpupload?..." + "partSize":1073741824, + "storageIdentifier":"s3://demo-dataverse-bucket:177883b000e-49cedef268ac" + } + +In the example responses above, the URLs, which are very long, have been omitted. These URLs reference the S3 server and the specific object identifier that will be used, starting with, for example, https://demo-dataverse-bucket.s3.amazonaws.com/10.5072/FK2FOQPJS/177883b000e-49cedef268ac?... + +The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file. + +In the single part case, only one call to the supplied URL is required: + +.. code-block:: bash + + curl -H 'x-amz-tagging:dv-state=temp' -X PUT -T "" + + +In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a slice of the total file, with the last part containing the remaining bytes. +The responses from the S3 server for these calls will include the 'eTag' for the uploaded part. + +To successfully conclude the multipart upload, the client must call the 'complete' URI, sending a json object including the part eTags: + +.. code-block:: bash + + curl -X PUT "$SERVER_URL/api/datasets/mpload?..." -d '{"1":"","2":"","3":"","4":"","5":""}' + +If the client is unable to complete the multipart upload, it should call the abort URL: + +.. code-block:: bash + + curl -X DELETE "$SERVER_URL/api/datasets/mpload?..." + + +.. _direct-add-to-dataset-api: + +Adding the Uploaded file to the Dataset +--------------------------------------- + +Once the file exists in the s3 bucket, a final API call is needed to add it to the Dataset. This call is the same call used to upload a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter. +jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, etc. 
For direct uploads, the jsonData object must also include values for: + +* "storageIdentifier" - String, as specified in prior calls +* "fileName" - String +* "mimeType" - String +* fixity/checksum: either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512 + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV + export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}" + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. + +To add multiple Uploaded Files to the Dataset +--------------------------------------------- + +Once the files exists in the s3 bucket, a final API call is needed to add all the files to the Dataset. In this API call, additional metadata is added using the "jsonData" parameter. +jsonData for this call is an array of objects that normally include information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must also include values for: + +* "description" - A description of the file +* "directoryLabel" - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset +* "storageIdentifier" - String +* "fileName" - String +* "mimeType" - String +* "fixity/checksum" either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512 + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV + export JSON_DATA="[{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}, \ + {'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53', 'fileName':'file2.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123789'}}]" + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/addFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. + + +Replacing an existing file in the Dataset +----------------------------------------- + +Once the file exists in the s3 bucket, a final API call is needed to register it as a replacement of an existing file. This call is the same call used to replace a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter. +jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, whether to allow the mimetype to change (forceReplace=true), etc. For direct uploads, the jsonData object must include values for: + +* "storageIdentifier" - String, as specified in prior calls +* "fileName" - String +* "mimeType" - String +* fixity/checksum: either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512. +Note that the API call does not validate that the file matches the hash value supplied. If a Dataverse instance is configured to validate file fixity hashes at publication time, a mismatch would be caught at that time and cause publication to fail. + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export FILE_IDENTIFIER=5072 + export JSON_DATA='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "forceReplace":"true", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}' + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA" + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. + +Replacing multiple existing files in the Dataset +------------------------------------------------ + +Once the replacement files exist in the s3 bucket, a final API call is needed to register them as replacements for existing files. In this API call, additional metadata is added using the "jsonData" parameter. +jsonData for this call is array of objects that normally include information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must include some additional values: + +* "fileToReplaceId" - the id of the file being replaced +* "forceReplace" - whether to replace a file with one of a different mimetype (optional, default is false) +* "description" - A description of the file +* "directoryLabel" - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset +* "storageIdentifier" - String +* "fileName" - String +* "mimeType" - String +* "fixity/checksum" either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512 + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV + export JSON_DATA='[{"fileToReplaceId": 10, "description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}},{"fileToReplaceId": 11, "forceReplace": true, "description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/replaceFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" + +The JSON object returned as a response from this API call includes a "data" that indicates how many of the file replacements succeeded and provides per-file error messages for those that don't, e.g. + +.. code-block:: + + { + "status": "OK", + "data": { + "Files": [ + { + "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", + "errorMessage": "Bad Request:The file to replace does not belong to this dataset.", + "fileDetails": { + "fileToReplaceId": 10, + "description": "My description.", + "directoryLabel": "data/subdir1", + "categories": [ + "Data" + ], + "restrict": "false", + "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", + "fileName": "file1.Bin", + "mimeType": "application/octet-stream", + "checksum": { + "@type": "SHA-1", + "@value": "123456" + } + } + }, + { + "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", + "successMessage": "Replaced successfully in the dataset", + "fileDetails": { + "description": "My description.", + "label": "file2.txt", + "restricted": false, + "directoryLabel": "data/subdir1", + "categories": [ + "Data" + ], + "dataFile": { + "persistentId": "", + "pidURL": "", + "filename": "file2.txt", + "contentType": "text/plain", + "filesize": 2407, + "description": "My description.", + "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", + "rootDataFileId": 11, + "previousDataFileId": 11, + "checksum": { + "type": "SHA-1", + "value": "123789" + } + } + } + } + ], + "Result": { + "Total number of files": 2, + "Number of files successfully replaced": 1 + } + } + } + + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. 
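+
+When scripting against this endpoint, the summary and any per-file error messages can be pulled out of the response with a tool such as ``jq``. This is only a convenience sketch, not part of the API itself:
+
+.. code-block:: bash
+
+    # save the response, then show the overall result counts
+    curl -s -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/replaceFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" > response.json
+    jq '.data.Result' response.json
+
+    # print the error message for each file that could not be replaced
+    jq -r '.data.Files[] | select(.errorMessage != null) | .errorMessage' response.json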
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 60d97feeef9..458a78a6c95 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -39,6 +39,7 @@ Developer Guide big-data-support aux-file-support s3-direct-upload-api + globus-api dataset-semantic-metadata-api dataset-migration-api workflows From 03a4c77155934060c33c33ed27ea2f7628301e91 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 10:58:33 +0000 Subject: [PATCH 0399/1112] Refactor: shortcut on datafile permission check --- .../harvard/iq/dataverse/PermissionServiceBean.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 2e4627576c6..107024bcfb9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -851,11 +851,13 @@ public boolean canDownloadAtLeastOneFile(User user, DatasetVersion datasetVersio if (user.isSuperuser()) { return true; } - if (hasReleasedFiles(datasetVersion)) { + if (hasUnrestrictedReleasedFiles(datasetVersion)) { return true; } for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) { - if (userOn(user, fileMetadata.getDataFile()).has(Permission.DownloadFile)) { + DataFile dataFile = fileMetadata.getDataFile(); + Set ras = new HashSet<>(groupService.groupsFor(user, dataFile)); + if (hasGroupPermissionsFor(ras, dataFile, EnumSet.of(Permission.DownloadFile))) { return true; } } @@ -863,7 +865,7 @@ public boolean canDownloadAtLeastOneFile(User user, DatasetVersion datasetVersio } /** - * Checks if a DatasetVersion has released files. + * Checks if a DatasetVersion has unrestricted released files. 
* * This method is mostly based on {@link #isPublicallyDownloadable(DvObject)} although in this case, instead of basing * the search on a particular file, it searches for the total number of files in the target version that are present @@ -872,7 +874,7 @@ public boolean canDownloadAtLeastOneFile(User user, DatasetVersion datasetVersio * @param targetDatasetVersion DatasetVersion to check * @return boolean indicating whether the dataset version has released files or not */ - private boolean hasReleasedFiles(DatasetVersion targetDatasetVersion) { + private boolean hasUnrestrictedReleasedFiles(DatasetVersion targetDatasetVersion) { Dataset targetDataset = targetDatasetVersion.getDataset(); if (!targetDataset.isReleased()) { return false; From 326b784da752091bf4c7b3bf4112ebfc327acb69 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 10:59:08 +0000 Subject: [PATCH 0400/1112] Refactor: variable extracted in isPublicallyDownloadable --- .../java/edu/harvard/iq/dataverse/PermissionServiceBean.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 107024bcfb9..1c568e83143 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -451,8 +451,9 @@ private boolean isPublicallyDownloadable(DvObject dvo) { if (!df.isRestricted()) { if (df.getOwner().getReleasedVersion() != null) { - if (df.getOwner().getReleasedVersion().getFileMetadatas() != null) { - for (FileMetadata fm : df.getOwner().getReleasedVersion().getFileMetadatas()) { + List fileMetadatas = df.getOwner().getReleasedVersion().getFileMetadatas(); + if (fileMetadatas != null) { + for (FileMetadata fm : fileMetadatas) { if (df.equals(fm.getDataFile())) { return true; } From 16c685dc30601d8a8b0140cec4b8621e1fe33a99 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 11:22:06 +0000 Subject: [PATCH 0401/1112] Changed: passing DataverseRequest instead of User to canDownloadAtLeastOneFile --- .../harvard/iq/dataverse/PermissionServiceBean.java | 11 ++++++----- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 1c568e83143..e87809ada56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -844,20 +844,21 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio /** * Checks if a User can download at least one file of the target DatasetVersion. 
* - * @param user User to check + * @param dataverseRequest DataverseRequest to check * @param datasetVersion DatasetVersion to check * @return boolean indicating whether the user can download at least one file or not */ - public boolean canDownloadAtLeastOneFile(User user, DatasetVersion datasetVersion) { - if (user.isSuperuser()) { + public boolean canDownloadAtLeastOneFile(DataverseRequest dataverseRequest, DatasetVersion datasetVersion) { + if (dataverseRequest.getUser().isSuperuser()) { return true; } if (hasUnrestrictedReleasedFiles(datasetVersion)) { return true; } - for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) { + List fileMetadatas = datasetVersion.getFileMetadatas(); + for (FileMetadata fileMetadata : fileMetadatas) { DataFile dataFile = fileMetadata.getDataFile(); - Set ras = new HashSet<>(groupService.groupsFor(user, dataFile)); + Set ras = new HashSet<>(groupService.groupsFor(dataverseRequest, dataFile)); if (hasGroupPermissionsFor(ras, dataFile, EnumSet.of(Permission.DownloadFile))) { return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a9cfefc33d8..6a1e11e690b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4145,7 +4145,7 @@ public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext cr @Context HttpHeaders headers) { return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); - return ok(permissionService.canDownloadAtLeastOneFile(getRequestUser(crc), datasetVersion)); + return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion)); }, getRequestUser(crc)); } } From 8ca2338723a0ec1a57a9affc923fe65229009909 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 11:22:51 +0000 Subject: [PATCH 0402/1112] Fixed: method doc --- .../java/edu/harvard/iq/dataverse/PermissionServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index e87809ada56..359e8823fce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -842,7 +842,7 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio } /** - * Checks if a User can download at least one file of the target DatasetVersion. + * Checks if a DataverseRequest can download at least one file of the target DatasetVersion. 
* * @param dataverseRequest DataverseRequest to check * @param datasetVersion DatasetVersion to check From 96cd5c9d55437180cfa256df38b0d5990c97ec6c Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 11:24:49 +0000 Subject: [PATCH 0403/1112] Added: explanatory comment --- .../java/edu/harvard/iq/dataverse/PermissionServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 359e8823fce..6dc943f1ca8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -852,6 +852,7 @@ public boolean canDownloadAtLeastOneFile(DataverseRequest dataverseRequest, Data if (dataverseRequest.getUser().isSuperuser()) { return true; } + // This is a shortcut to avoid having to check version files if the condition is met if (hasUnrestrictedReleasedFiles(datasetVersion)) { return true; } From 3c1820b060b303da2bfa97132667ceccb5d5e977 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 11:48:09 +0000 Subject: [PATCH 0404/1112] Added: includeDeaccessioned query param to getCanDownloadAtLeastOneFile API endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 6a1e11e690b..579f4f78fe1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4141,10 +4141,11 @@ public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion)); }, getRequestUser(crc)); } From 811d79a7f8d017745fcfd782b233ec583d3669e2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Dec 2023 08:33:38 -0500 Subject: [PATCH 0405/1112] change minio access key, more l33t #6783 --- docker-compose-dev.yml | 2 +- src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 98376e255dd..e68215d53d2 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -212,7 +212,7 @@ services: - minio_storage:/data environment: MINIO_ROOT_USER: 4cc355_k3y - MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k35 + MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k3y command: server /data networks: diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index 1306c30d9c1..41446349093 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -55,7 +55,7 @@ public static void setUp() { .withEndpointConfiguration(new 
EndpointConfiguration("s3.localhost.localstack.cloud:4566", Regions.US_EAST_2.getName())).build(); String accessKeyMinio = "4cc355_k3y"; - String secretKeyMinio = "s3cr3t_4cc355_k35"; + String secretKeyMinio = "s3cr3t_4cc355_k3y"; s3minio = AmazonS3ClientBuilder.standard() // https://stackoverflow.com/questions/72205086/amazonss3client-throws-unknownhostexception-if-attempting-to-connect-to-a-local .withPathStyleAccessEnabled(Boolean.TRUE) From e9a670c8620c068419080aad25421afa04641958 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Dec 2023 10:39:09 -0500 Subject: [PATCH 0406/1112] collection not DB #10101 --- doc/sphinx-guides/source/qa/performance-tests.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index f433226d4ff..447c4f6c54d 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -20,4 +20,4 @@ Please note the performance database is also used occasionally by Julian and the Executing the Performance Script -------------------------------- -To execute the performance test script, you need to install a local copy of the database-helper-scripts project at . We have since produced a stripped-down script that calls just the DB and ds and works with python3. +To execute the performance test script, you need to install a local copy of the database-helper-scripts project at . We have since produced a stripped-down script that calls just the collection and dataset and works with Python 3. From a81ad72a0896073e043ee57848e571d7a3754a8a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Dec 2023 10:50:46 -0500 Subject: [PATCH 0407/1112] comment out optional listing of buckets #6783 --- .../harvard/iq/dataverse/api/S3AccessIT.java | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index 41446349093..74150ca120a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -62,16 +62,15 @@ public static void setUp() { .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyMinio, secretKeyMinio))) .withEndpointConfiguration(new EndpointConfiguration("http://localhost:9000", Regions.US_EAST_1.getName())).build(); - System.out.println("buckets on LocalStack before attempting to create " + BUCKET_NAME); - for (Bucket bucket : s3localstack.listBuckets()) { - System.out.println("bucket: " + bucket); - } - - System.out.println("buckets on MinIO before attempting to create " + BUCKET_NAME); - for (Bucket bucket : s3minio.listBuckets()) { - System.out.println("bucket: " + bucket); - } - +// System.out.println("buckets on LocalStack before attempting to create " + BUCKET_NAME); +// for (Bucket bucket : s3localstack.listBuckets()) { +// System.out.println("bucket: " + bucket); +// } +// +// System.out.println("buckets on MinIO before attempting to create " + BUCKET_NAME); +// for (Bucket bucket : s3minio.listBuckets()) { +// System.out.println("bucket: " + bucket); +// } // create bucket if it doesn't exist // Note that we create the localstack bucket with conf/localstack/buckets.sh // because we haven't figured out how to create it properly in Java. 
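Until bucket creation is wired up in Java, the LocalStack bucket can also be created from the shell with the AWS CLI pointed at the LocalStack edge endpoint. This is only a sketch of what a helper script like conf/localstack/buckets.sh might do (the script itself is not shown here), and it assumes LocalStack's usual acceptance of dummy credentials:

```
# create the test bucket against LocalStack's edge endpoint (port 4566, as in the test code above)
export AWS_ACCESS_KEY_ID=test
export AWS_SECRET_ACCESS_KEY=test
aws --endpoint-url=http://localhost:4566 s3 mb s3://<bucket-name>
```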
From 0bd9f139e5dca2851ca88ed12c5e31af9c5bbfe9 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 6 Dec 2023 11:01:04 -0500 Subject: [PATCH 0408/1112] Update doc/release-notes/6.1-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.1-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index c2b52ab34b8..06a3e01f7af 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -1,6 +1,6 @@ # Dataverse 6.1 -(If this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/6.1-release-notes.md) +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.1 rather than the list of releases, which will cut them off. This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. From c97d7b55e2932dacaa19e4e3ac403c88a25bd2ee Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 11:01:56 -0500 Subject: [PATCH 0409/1112] globus api doc --- .../source/developers/globus-api.rst | 348 ++++++++---------- 1 file changed, 149 insertions(+), 199 deletions(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 2775ffd2142..6a94f220dc2 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -6,277 +6,227 @@ The Globus API addresses three use cases: * Reference of files that will remain in a remote Globus endpoint * Transfer from a Dataverse-managed Globus endpoint -The ability for Dataverse to interact with Globus endpoints is configured via -Direct upload involves a series of three activities, each involving interacting with the server for a Dataverse installation: +The ability for Dataverse to interact with Globus endpoints is configured via a Globus store - see :ref:`globus-storage`. -* Requesting initiation of a transfer from the server -* Use of the pre-signed URL(s) returned in that call to perform an upload/multipart-upload of the file to S3 -* A call to the server to register the file/files as part of the dataset/replace a file in the dataset or to cancel the transfer +Globus transfers (or referencing a remote endpoint) for upload and download transfers involve a series of steps. These can be accomplished using the Dataverse and Globus APIs. (These are used internally by the `dataverse-globus app `_ when transfers are done via the Dataverse UI.) -This API is only enabled when a Dataset is configured with a data store supporting direct S3 upload. -Administrators should be aware that partial transfers, where a client starts uploading the file/parts of the file and does not contact the server to complete/cancel the transfer, will result in data stored in S3 that is not referenced in the Dataverse installation (e.g. should be considered temporary and deleted.) 
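+
+The transfer step itself - moving the bytes between endpoints - is performed with Globus tooling rather than with Dataverse. For example, with the Globus CLI a single file could be transferred roughly as follows; this is a sketch only, and the endpoint ids and paths are placeholders for values obtained from the calls described below:
+
+.. code-block:: bash
+
+    globus transfer "$SOURCE_ENDPOINT_ID:/path/on/source/file1.txt" "$DESTINATION_ENDPOINT_ID:/hdc1/10.5072/FK2/XXXXXX/file1.txt"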
+Requesting Upload or Download Parameters +---------------------------------------- - -Requesting Direct Upload of a DataFile --------------------------------------- -To initiate a transfer of a file to S3, make a call to the Dataverse installation indicating the size of the file to upload. The response will include a pre-signed URL(s) that allow the client to transfer the file. Pre-signed URLs include a short-lived token authorizing the action represented by the URL. +The first step in preparing for a Globus transfer/reference operation is to request the parameters relevant for a given dataset: .. code-block:: bash - export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export SERVER_URL=https://demo.dataverse.org - export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV - export SIZE=1000000000 - - curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/uploadurls?persistentId=$PERSISTENT_IDENTIFIER&size=$SIZE" - -The response to this call, assuming direct uploads are enabled, will be one of two forms: + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/globusUploadParameters?locale=$LOCALE" -Single URL: when the file is smaller than the size at which uploads must be broken into multiple parts +The response will be of the form: .. code-block:: bash { - "status":"OK", - "data":{ - "url":"...", - "partSize":1073741824, - "storageIdentifier":"s3://demo-dataverse-bucket:177883619b8-892ca9f7112e" + "status": "OK", + "data": { + "queryParameters": { + "datasetId": 29, + "siteUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com", + "datasetVersion": ":draft", + "dvLocale": "en", + "datasetPid": "doi:10.5072/FK2/ILLPXE", + "managed": "true", + "endpoint": "d8c42580-6528-4605-9ad8-116a61982644" + }, + "signedUrls": [ + { + "name": "requestGlobusTransferPaths", + "httpMethod": "POST", + "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/requestGlobusUploadPaths?until=2023-11-22T01:52:03.648&user=dataverseAdmin&method=POST&token=63ac4bb748d12078dded1074916508e19e6f6b61f64294d38e0b528010b07d48783cf2e975d7a1cb6d4a3c535f209b981c7c6858bc63afdfc0f8ecc8a139b44a", + "timeOut": 300 + }, + { + "name": "addGlobusFiles", + "httpMethod": "POST", + "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/addGlobusFiles?until=2023-11-22T01:52:03.648&user=dataverseAdmin&method=POST&token=2aaa03f6b9f851a72e112acf584ffc0758ed0cc8d749c5a6f8c20494bb7bc13197ab123e1933f3dde2711f13b347c05e6cec1809a8f0b5484982570198564025", + "timeOut": 300 + }, + { + "name": "getDatasetMetadata", + "httpMethod": "GET", + "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/versions/:draft?until=2023-11-22T01:52:03.649&user=dataverseAdmin&method=GET&token=1878d6a829cd5540e89c07bdaf647f1bea5314cc7a55433b0b506350dd330cad61ade3714a8ee199a7b464fb3b8cddaea0f32a89ac3bfc4a86cd2ea3004ecbb8", + "timeOut": 300 + }, + { + "name": "getFileListing", + "httpMethod": "GET", + "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/versions/:draft/files?until=2023-11-22T01:52:03.650&user=dataverseAdmin&method=GET&token=78e8ca8321624f42602af659227998374ef3788d0feb43d696a0e19086e0f2b3b66b96981903a1565e836416c504b6248cd3c6f7c2644566979bd16e23a99622", + "timeOut": 300 + } + ] + } } - } -Multiple URLs: when the file must be uploaded in multiple parts. 
The part size is set by the Dataverse installation and, for AWS-based storage, range from 5 MB to 5 GB +The response includes the id for the Globus endpoint to use along with several signed URLs. -.. code-block:: bash +The getDatasetMetadata and getFileListing URLs are just signed versions of the standard Dataset metadata and file listing API calls. The other two are Globus specific. - { - "status":"OK", - "data":{ - "urls":{ - "1":"...", - "2":"...", - "3":"...", - "4":"...", - "5":"..." - } - "abort":"/api/datasets/mpupload?...", - "complete":"/api/datasets/mpupload?..." - "partSize":1073741824, - "storageIdentifier":"s3://demo-dataverse-bucket:177883b000e-49cedef268ac" - } +If called for a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a +the "managed" parameter will be false, the "endpoint" parameter is replaced with a JSON array of "referenceEndpointsWithPaths" and the +requestGlobusTransferPaths and addGlobusFiles URLs are replaced with ones for requestGlobusReferencePaths and addFiles. All of these calls are +describe further below. + +The call to set up for a transfer out (download) is similar: -In the example responses above, the URLs, which are very long, have been omitted. These URLs reference the S3 server and the specific object identifier that will be used, starting with, for example, https://demo-dataverse-bucket.s3.amazonaws.com/10.5072/FK2FOQPJS/177883b000e-49cedef268ac?... +.. code-block:: bash -The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file. + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/globusDownloadParameters?locale=$LOCALE" -In the single part case, only one call to the supplied URL is required: +Note that this API call supports an additional downloadId query parameter. This is only used when the globus-dataverse app is called from the Dataverse user interface. There is no need to use it when calling the API directly. -.. code-block:: bash +The returned response includes the same getDatasetMetadata and getFileListing URLs as in the upload case and includes "monitorGlobusDownload" and "requestGlobusDownload" URLs. The response will also indicate whether the store is "managed" and will provide the "endpoint" from which downloads can be made. - curl -H 'x-amz-tagging:dv-state=temp' -X PUT -T "" +Performing an Upload/Transfer In +-------------------------------- -In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a slice of the total file, with the last part containing the remaining bytes. -The responses from the S3 server for these calls will include the 'eTag' for the uploaded part. +The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer or to reference files (based on the "managed" parameter). -To successfully conclude the multipart upload, the client must call the 'complete' URI, sending a json object including the part eTags: +Once the user identifies which files are to be added, the requestGlobusTransferPaths or requestGlobusReferencePaths URLs can be called. These both reference the same API call but must be used with different entries in the JSON body sent: .. code-block:: bash - curl -X PUT "$SERVER_URL/api/datasets/mpload?..." 
-d '{"1":"","2":"","3":"","4":"","5":""}' - -If the client is unable to complete the multipart upload, it should call the abort URL: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV + export LOCALE=en-US + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUpload" -.. code-block:: bash - - curl -X DELETE "$SERVER_URL/api/datasets/mpload?..." - +Note that when using the dataverse-globus app or the return from the previous call, the URL for this call will be signed and no API_TOKEN is needed. -.. _direct-add-to-dataset-api: +In the managed case, the JSON body sent must include the id of the Globus user that will perform the transfer and the number of files that will be transferred: -Adding the Uploaded file to the Dataset ---------------------------------------- +.. code-block:: bash + { + "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", + "numberOfFiles":2 + } -Once the file exists in the s3 bucket, a final API call is needed to add it to the Dataset. This call is the same call used to upload a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter. -jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must also include values for: +In the remote reference case, the JSON body sent must include the Globus endpoint/paths that will be referenced: -* "storageIdentifier" - String, as specified in prior calls -* "fileName" - String -* "mimeType" - String -* fixity/checksum: either: +.. code-block:: bash + { + "referencedFiles":[ + "d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt" + ] + } + +The response will include a JSON object. In the managed case, the map is from new assigned file storageidentifiers and specific paths on the managed Globus endpoint: +.. code-block:: bash - * "md5Hash" - String with MD5 hash value, or - * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + { + "status":"OK", + "data":{ + "globusm://18b49d3688c-62137dcb06e4":"/hdc1/10.5072/FK2/ILLPXE/18b49d3688c-62137dcb06e4", + "globusm://18b49d3688c-5c17d575e820":"/hdc1/10.5072/FK2/ILLPXE/18b49d3688c-5c17d575e820" + } + } -The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512 +In the managed case, the specified Globus principal is granted write permission to the specified endpoint/path, +which will allow initiation of a transfer from the external endpoint to the managed endpoint using the Globus API. +The permission will be revoked if the transfer is not started and the next call to Dataverse to finish the transfer are not made within a short time (configurable, default of 5 minutes). + +In the remote/reference case, the map is from the initially supplied endpoint/paths to the new assigned file storageidentifiers: .. 
+
+In the remote/reference case, the map is from the initially supplied endpoint/paths to the new assigned file storageidentifiers:
.. code-block:: bash
- export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- export SERVER_URL=https://demo.dataverse.org
- export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
- export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}"
-
- curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
-
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
-
-To add multiple Uploaded Files to the Dataset
---------------------------------------------
+ {
+ "status":"OK",
+ "data":{
+ "d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt":"globus://18bf8c933f4-ed2661e7d19b//d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt"
+ }
+ }
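In the remote/reference case, these globus:// values are what later goes into the standard /addFiles call (described below). A minimal sketch, using the identifier from the example response above and placeholder mimeType and checksum values:

.. code-block:: bash

  # Sketch only: the storageIdentifier is the globus:// value returned above;
  # the mimeType and checksum values are placeholders.
  export JSON_DATA='[{"description":"My description.","directoryLabel":"data/subdir1","storageIdentifier":"globus://18bf8c933f4-ed2661e7d19b//d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt","fileName":"test1.txt","mimeType":"text/plain","checksum":{"@type":"MD5","@value":"1234"}}]'

  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/addFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"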
-Once the files exists in the s3 bucket, a final API call is needed to add all the files to the Dataset. In this API call, additional metadata is added using the "jsonData" parameter.
-jsonData for this call is an array of objects that normally include information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must also include values for:
-* "description" - A description of the file
-* "directoryLabel" - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset
-* "storageIdentifier" - String
-* "fileName" - String
-* "mimeType" - String
-* "fixity/checksum" either:
- * "md5Hash" - String with MD5 hash value, or
- * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
+Adding Files to the Dataset
+---------------------------
-The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512
+In the managed case, once a Globus transfer has been initiated a final API call is made to Dataverse to provide it with the task identifier of the transfer and information about the files being transferred:
.. code-block:: bash
 export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 export SERVER_URL=https://demo.dataverse.org
- export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
- export JSON_DATA="[{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}, \
- {'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53', 'fileName':'file2.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123789'}}]"
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+ export JSON_DATA="{"taskIdentifier":"3f530302-6c48-11ee-8428-378be0d9c521", \
+ "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \
+ {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}"
- curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/addFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA""
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+Note that the mimetype is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL.
+With this information, Dataverse will begin to monitor the transfer and when it completes, will add all files for which the transfer succeeded.
+As the transfer can take significant time and the API call is asynchronous, the only way to determine if the transfer succeeded via API is to use the standard calls to check the dataset lock state and contents.
-Replacing an existing file in the Dataset
-----------------------------------------
+Once the transfer completes, Dataverse will remove the write permission for the principal.
-Once the file exists in the s3 bucket, a final API call is needed to register it as a replacement of an existing file. This call is the same call used to replace a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter.
-jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, whether to allow the mimetype to change (forceReplace=true), etc. For direct uploads, the jsonData object must include values for:
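As a concrete way to watch for completion of the managed upload described above, a sketch that polls the standard dataset locks call and then lists the latest version's files; the dataset database id and the polling interval are placeholders.

.. code-block:: bash

  # Sketch only: wait until no locks remain on the dataset, then list the files that
  # were added. $DATASET_ID and the 10-second interval are placeholders.
  until [ "$(curl -s -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$DATASET_ID/locks" | jq '.data | length')" -eq 0 ]; do
    sleep 10
  done
  curl -s -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$DATASET_ID/versions/:latest/files" | jq -r '.data[].label'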
+Note that when using a managed endpoint that uses the Globus S3 Connector, the checksum should be correct as Dataverse can validate it. For file-based endpoints, the checksum should be included if available but Dataverse cannot verify it.
-* "storageIdentifier" - String, as specified in prior calls
-* "fileName" - String
-* "mimeType" - String
-* fixity/checksum: either:
+In the remote/reference case, where there is no transfer to monitor, the standard /addFiles API call (see :ref:`direct-add-to-dataset-api`) is used instead. There are no changes for the Globus case.
- * "md5Hash" - String with MD5 hash value, or
- * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
+Downloading/Transfer Out Via Globus
+-----------------------------------
-The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512.
-Note that the API call does not validate that the file matches the hash value supplied. If a Dataverse instance is configured to validate file fixity hashes at publication time, a mismatch would be caught at that time and cause publication to fail.
+To begin downloading files, the requestGlobusDownload URL is used:
.. code-block:: bash
 export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 export SERVER_URL=https://demo.dataverse.org
- export FILE_IDENTIFIER=5072
- export JSON_DATA='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "forceReplace":"true", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}'
-
- curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA"
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+ curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusDownload"
-Replacing multiple existing files in the Dataset
------------------------------------------------
+The JSON body sent should include a list of file ids to download and, for a managed endpoint, the Globus principal that will make the transfer:
-Once the replacement files exist in the s3 bucket, a final API call is needed to register them as replacements for existing files. In this API call, additional metadata is added using the "jsonData" parameter.
-jsonData for this call is array of objects that normally include information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must include some additional values:
+.. code-block:: bash
+ {
+ "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75",
+ "fileIds":[60, 61]
+ }
+
+Note that this API call takes an optional downloadId parameter that is used with the dataverse-globus app. When downloadId is included, the list of fileIds is not needed.
-* "fileToReplaceId" - the id of the file being replaced
-* "forceReplace" - whether to replace a file with one of a different mimetype (optional, default is false)
-* "description" - A description of the file
-* "directoryLabel" - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset
-* "storageIdentifier" - String
-* "fileName" - String
-* "mimeType" - String
-* "fixity/checksum" either:
+The response is a JSON object mapping the requested file Ids to Globus endpoint/paths. In the managed case, the principal will have been given read permissions for the specified paths:
- * "md5Hash" - String with MD5 hash value, or
- * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
.. code-block:: bash
 {
 "status":"OK",
 "data":{
 "60": "d8c42580-6528-4605-9ad8-116a61982644/hdc1/10.5072/FK2/ILLPXE/18bf3af9c78-92b8e168090e",
 "61": "d8c42580-6528-4605-9ad8-116a61982644/hdc1/10.5072/FK2/ILLPXE/18bf3af9c78-c8d81569305c"
 }
 }
-The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512
+For the remote case, the user can perform the transfer without further contact with Dataverse. In the managed case, the user must initiate the transfer via the Globus API and then inform Dataverse.
+Dataverse will then monitor the transfer and revoke the read permission when the transfer is complete. (Not making this last call could result in failure of the transfer.)
.. code-block:: bash
 export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 export SERVER_URL=https://demo.dataverse.org
- export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
- export JSON_DATA='[{"fileToReplaceId": 10, "description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}},{"fileToReplaceId": 11, "forceReplace": true, "description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]'
-
- curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/replaceFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
-
-The JSON object returned as a response from this API call includes a "data" that indicates how many of the file replacements succeeded and provides per-file error messages for those that don't, e.g.
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/monitorGlobusDownload"
+
+The JSON body sent just contains the task identifier for the transfer:
-..
code-block:: bash { - "status": "OK", - "data": { - "Files": [ - { - "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", - "errorMessage": "Bad Request:The file to replace does not belong to this dataset.", - "fileDetails": { - "fileToReplaceId": 10, - "description": "My description.", - "directoryLabel": "data/subdir1", - "categories": [ - "Data" - ], - "restrict": "false", - "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", - "fileName": "file1.Bin", - "mimeType": "application/octet-stream", - "checksum": { - "@type": "SHA-1", - "@value": "123456" - } - } - }, - { - "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", - "successMessage": "Replaced successfully in the dataset", - "fileDetails": { - "description": "My description.", - "label": "file2.txt", - "restricted": false, - "directoryLabel": "data/subdir1", - "categories": [ - "Data" - ], - "dataFile": { - "persistentId": "", - "pidURL": "", - "filename": "file2.txt", - "contentType": "text/plain", - "filesize": 2407, - "description": "My description.", - "storageIdentifier": "s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", - "rootDataFileId": 11, - "previousDataFileId": 11, - "checksum": { - "type": "SHA-1", - "value": "123789" - } - } - } - } - ], - "Result": { - "Total number of files": 2, - "Number of files successfully replaced": 1 - } - } + "taskIdentifier":"b5fd01aa-8963-11ee-83ae-d5484943e99a" } + - -Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. -With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. 
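Tying the managed download steps above together, a sketch that reports the transfer to Dataverse right after starting it and then watches the Globus task with the Globus CLI while Dataverse monitors it server-side; the task id shown is the illustrative one from the example body above.

.. code-block:: bash

  # Sketch only: the task identifier comes from the Globus transfer you initiated;
  # the value here is the illustrative one used in the example above.
  export TASK_ID=b5fd01aa-8963-11ee-83ae-d5484943e99a

  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST \
    "$SERVER_URL/api/datasets/:persistentId/monitorGlobusDownload" \
    -d "{\"taskIdentifier\":\"$TASK_ID\"}"

  # Optionally follow the transfer yourself with the Globus CLI.
  globus task show "$TASK_ID"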
From c7d73f64177745fa7892543407025f9130dcb83b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 11:25:22 -0500 Subject: [PATCH 0410/1112] default for globus-cache-maxage --- src/main/resources/META-INF/microprofile-config.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 504b5e46735..ec8427795ee 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -18,6 +18,7 @@ dataverse.build= dataverse.files.directory=${STORAGE_DIR:/tmp/dataverse} dataverse.files.uploads=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/uploads dataverse.files.docroot=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/docroot +dataverse.files.globus-cache-maxage=5 # SEARCH INDEX dataverse.solr.host=localhost From 1fb7ddf6d89a1b36f9a059f016ac617aa6ec3758 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 11:27:42 -0500 Subject: [PATCH 0411/1112] fix spacing --- doc/sphinx-guides/source/developers/globus-api.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 6a94f220dc2..5b2b6982866 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -105,6 +105,7 @@ Note that when using the dataverse-globus app or the return from the previous ca In the managed case, the JSON body sent must include the id of the Globus user that will perform the transfer and the number of files that will be transferred: .. code-block:: bash + { "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", "numberOfFiles":2 @@ -113,6 +114,7 @@ In the managed case, the JSON body sent must include the id of the Globus user t In the remote reference case, the JSON body sent must include the Globus endpoint/paths that will be referenced: .. code-block:: bash + { "referencedFiles":[ "d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt" @@ -120,6 +122,7 @@ In the remote reference case, the JSON body sent must include the Globus endpoin } The response will include a JSON object. In the managed case, the map is from new assigned file storageidentifiers and specific paths on the managed Globus endpoint: + .. code-block:: bash { @@ -161,7 +164,6 @@ In the managed case, once a Globus transfer has been initiated a final API call "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \ {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}" - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA"" Note that the mimetype is multipart/form-data, matching the /addFiles API call. ALso note that the API_TOKEN is not needed when using a signed URL. 
@@ -191,6 +193,7 @@ To begin downloading files, the requestGlobusDownload URL is used: The JSON body sent should include a list of file ids to download and, for a managed endpoint, the Globus principal that will make the transfer: .. code-block:: bash + { "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", "fileIds":[60, 61] From c2ad0092c545a41f071129bcd85c398775a53a1e Mon Sep 17 00:00:00 2001 From: sbondka Date: Wed, 6 Dec 2023 17:28:40 +0100 Subject: [PATCH 0412/1112] Add modifications --- .../source/_static/admin/dataverse-external-tools.tsv | 1 + doc/sphinx-guides/source/admin/integrations.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 4f4c29d0670..ba60be59227 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -5,3 +5,4 @@ Binder explore dataset Binder allows you to spin up custom computing environment File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. +JupyterHub explore file The `Dataverse-to-JupyterHub Data Transfer Connector `_ is a tool that simplifies the transfer of data between Dataverse repositories and the cloud-based platform JupyterHub. It is designed for researchers, scientists, and data analysts, facilitating collaboration on projects by seamlessly moving datasets and files. The tool is a lightweight client-side web application built using React and relies on the Dataverse External Tool feature, allowing for easy deployment on modern integration systems. Currently optimized for small to medium-sized files, future plans include extending support for larger files and signed Dataverse endpoints. 
For more details, you can refer to the external tool manifest: https://forgemia.inra.fr/dipso/eosc-pillar/dataverse-jupyterhub-connector/-/blob/master/externalTools.json diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index a9b962f33ca..ed3860a9ca1 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -188,12 +188,12 @@ Researchers can use a Google Sheets add-on to search for Dataverse installation' JupyterHub ++++++++++ -The Dataverse-to-JupyterHub Data Transfer Connector streamlines data transfer between Dataverse repositories and the cloud-based platform JupyterHub, enhancing collaborative research. +The `Dataverse-to-JupyterHub Data Transfer Connector `_ streamlines data transfer between Dataverse repositories and the cloud-based platform JupyterHub, enhancing collaborative research. This connector facilitates seamless two-way transfer of datasets and files, emphasizing the potential of an integrated research environment. It is a lightweight client-side web application built using React and relying on the Dataverse External Tool feature, allowing for easy deployment on modern integration systems. Currently, it supports small to medium-sized files, with plans to enable support for large files and signed Dataverse endpoints in the future. What kind of user is the feature intended for? -The feature is intended for reasearchers, scientists and data analyst working with Dataverse instances and JupyterHub looking to ease the data transfer process. +The feature is intended for researchers, scientists and data analyst who are working with Dataverse instances and JupyterHub looking to ease the data transfer process. .. _integrations-discovery: From a9a8f0cadec9bc3b31f0546805c46cdbf578aef1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Dec 2023 11:37:06 -0500 Subject: [PATCH 0413/1112] clarify it's pages we're hitting #10101 --- doc/sphinx-guides/source/qa/performance-tests.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index 447c4f6c54d..ad7972bd75e 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -20,4 +20,4 @@ Please note the performance database is also used occasionally by Julian and the Executing the Performance Script -------------------------------- -To execute the performance test script, you need to install a local copy of the database-helper-scripts project at . We have since produced a stripped-down script that calls just the collection and dataset and works with Python 3. +To execute the performance test script, you need to install a local copy of the database-helper-scripts project at . We have since produced a stripped-down script that calls just the collection and dataset pages and works with Python 3. 
From 6fee16dec8125390ea6aa7221a19fde0db2b9730 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 6 Dec 2023 11:52:24 -0500 Subject: [PATCH 0414/1112] #10151 incorporate json schema --- doc/release-notes/6.1-release-notes.md | 6 +++++- doc/release-notes/9464-json-validation.md | 3 --- 2 files changed, 5 insertions(+), 4 deletions(-) delete mode 100644 doc/release-notes/9464-json-validation.md diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 06a3e01f7af..990ba219cad 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -24,7 +24,7 @@ With the upload-out-of-band option enabled, it is also possible for file upload Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` Since Alternative Title is repeatable now, old json apis would not be compatable with a new version since value of alternative title has changed from simple string to an array. -For example, instead "value": "Alternative Title", the value canbe "value": ["Alternative Title1", "Alternative Title2"] +For example, instead "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] ### Improvements in the /versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions @@ -45,6 +45,8 @@ This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/ - deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version). - getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes. - getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. +- getDatasetJsonSchema (/api/dataverses/{id}/datasetSchema): Get a dataset schema with the fields required by a given dataverse collection. +- validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset json file is in proper format and contains the required elements and fields for a given dataverse collection. ### Extended the existing endpoints: - getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. @@ -112,6 +114,8 @@ to generate updated versions. - We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html See also #10060. +- Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. 
In this release funtionality is limited to json format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) + ### Solr Improvements - As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. diff --git a/doc/release-notes/9464-json-validation.md b/doc/release-notes/9464-json-validation.md deleted file mode 100644 index f104263ba35..00000000000 --- a/doc/release-notes/9464-json-validation.md +++ /dev/null @@ -1,3 +0,0 @@ -Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release funtionality is limited to json format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) - -For documentation see the API changelog: http://preview.guides.gdcc.io/en/develop/api/changelog.html From 15e80aa4c847cb5ce8574fe600723c9cc81a5bc2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 16:56:37 +0000 Subject: [PATCH 0415/1112] Fixed: roleAssignees setup in canDownloadAtLeastOneFile --- .../edu/harvard/iq/dataverse/PermissionServiceBean.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 6dc943f1ca8..471cac31e77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -849,7 +849,8 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio * @return boolean indicating whether the user can download at least one file or not */ public boolean canDownloadAtLeastOneFile(DataverseRequest dataverseRequest, DatasetVersion datasetVersion) { - if (dataverseRequest.getUser().isSuperuser()) { + User user = dataverseRequest.getUser(); + if (user.isSuperuser()) { return true; } // This is a shortcut to avoid having to check version files if the condition is met @@ -859,8 +860,9 @@ public boolean canDownloadAtLeastOneFile(DataverseRequest dataverseRequest, Data List fileMetadatas = datasetVersion.getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { DataFile dataFile = fileMetadata.getDataFile(); - Set ras = new HashSet<>(groupService.groupsFor(dataverseRequest, dataFile)); - if (hasGroupPermissionsFor(ras, dataFile, EnumSet.of(Permission.DownloadFile))) { + Set roleAssignees = new HashSet<>(groupService.groupsFor(dataverseRequest, dataFile)); + roleAssignees.add(user); + if (hasGroupPermissionsFor(roleAssignees, dataFile, EnumSet.of(Permission.DownloadFile))) { return true; } } From 4b71b36305fb6c18f7282530dc4491976a352936 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 6 Dec 2023 17:02:07 +0000 Subject: [PATCH 0416/1112] Added: IT for getCanDownloadAtLeastOneFile endpoint --- .../harvard/iq/dataverse/api/DatasetsIT.java | 71 +++++++++++++++---- 1 file changed, 58 insertions(+), 13 deletions(-) diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 945b741a94b..3510f2c06ef 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -80,7 +80,6 @@ import javax.xml.stream.XMLStreamReader; import static java.lang.Thread.sleep; -import static org.junit.jupiter.api.Assertions.assertEquals; import org.hamcrest.CoreMatchers; @@ -90,11 +89,7 @@ import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.contains; - -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.*; public class DatasetsIT { @@ -4123,10 +4118,10 @@ public void testGetUserPermissionsOnDataset() { } @Test - public void testGetCanDownloadAtLeastOneFile() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); + public void testGetCanDownloadAtLeastOneFile() throws InterruptedException { + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); @@ -4135,15 +4130,65 @@ public void testGetCanDownloadAtLeastOneFile() { Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); - // Call with valid dataset id - Response canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, apiToken); + // Upload file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String fileId = JsonPath.from(uploadResponse.body().asString()).getString("data.files[0].dataFile.id"); + + // Publish dataset version + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Make sure the dataset is published + Thread.sleep(3000); + + // Create a second user to call the getCanDownloadAtLeastOneFile method + Response createSecondUserResponse = UtilIT.createRandomUser(); + createSecondUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String secondUserApiToken = UtilIT.getApiTokenFromResponse(createSecondUserResponse); + String 
secondUserUsername = UtilIT.getUsernameFromResponse(createSecondUserResponse); + + // Call with a valid dataset id when a file is released + Response canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, secondUserApiToken); canDownloadAtLeastOneFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadAtLeastOneFile = JsonPath.from(canDownloadAtLeastOneFileResponse.body().asString()).getBoolean("data"); assertTrue(canDownloadAtLeastOneFile); + // Restrict file + Response restrictFileResponse = UtilIT.restrictFile(fileId, true, apiToken); + restrictFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Publish dataset version + publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Make sure the dataset is published + Thread.sleep(3000); + + // Call with a valid dataset id when a file is restricted and the user does not have access + canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, secondUserApiToken); + canDownloadAtLeastOneFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + canDownloadAtLeastOneFile = JsonPath.from(canDownloadAtLeastOneFileResponse.body().asString()).getBoolean("data"); + assertFalse(canDownloadAtLeastOneFile); + + // Grant restricted file access to the user + Response grantFileAccessResponse = UtilIT.grantFileAccess(fileId, "@" + secondUserUsername, apiToken); + grantFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Call with a valid dataset id when a file is restricted and the user has access + canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, secondUserApiToken); + canDownloadAtLeastOneFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + canDownloadAtLeastOneFile = JsonPath.from(canDownloadAtLeastOneFileResponse.body().asString()).getBoolean("data"); + assertTrue(canDownloadAtLeastOneFile); + // Call with invalid dataset id - Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, apiToken); + Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } } From 6d2f87ca93c108a9b4ec4905372a2e1709b3f5cf Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 6 Dec 2023 12:24:26 -0500 Subject: [PATCH 0417/1112] adding review comment changes --- doc/release-notes/6.1-release-notes.md | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 990ba219cad..4b5c20f3953 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -12,8 +12,8 @@ This release contains major upgrades to core components. Detailed upgrade instru ## Detailed Release Highlights, New Features and Use Case Scenarios ### Dataverse installation can be now be configured to allow out-of-band upload -- Installation can be now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`. 
-By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). +In some situations, direct upload might not work from the UI, e.g., when s3 storage is not accessible from the internet. This pull request adds an option to [allow direct uploads via API only](https://github.com/IQSS/dataverse/pull/9003). This way, a third party application can use direct upload from within the internal network, while there is no direct download available to the users via UI. +By default, Dataverse supports uploading files via the [add a file to a dataset](https://guides.dataverse.org/en/6.1/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. ### Alternative Title is made repeatable. @@ -23,7 +23,7 @@ With the upload-out-of-band option enabled, it is also possible for file upload Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml` Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` -Since Alternative Title is repeatable now, old json apis would not be compatable with a new version since value of alternative title has changed from simple string to an array. +Since Alternative Title is repeatable now, old json apis would not be compatible with a new version since value of alternative title has changed from simple string to an array. For example, instead "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] ### Improvements in the /versions API @@ -70,7 +70,6 @@ This parameter applies a filter criteria to the operation and supports the follo - Can delete the dataset draft - getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. - ### DataFile API payload has been extended to include the following fields: - tabularData: Boolean field to know if the DataFile is of tabular type - fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) @@ -114,7 +113,7 @@ to generate updated versions. - We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html See also #10060. -- Functionality has been added to help validate dataset JSON prior to dataset creation. 
There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release funtionality is limited to json format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) +- Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to json format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) ### Solr Improvements - As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. @@ -125,12 +124,13 @@ Please see the "Installing Solr" section of the Installation Prerequisites guide ### Development - Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools - - There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews - - A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. - Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. +- `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is +also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. +- As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. 
## OpenID Connect Authentication Provider Improvements @@ -175,6 +175,8 @@ As part of these testing improvements, the code coverage report file for unit te - dataverse.auth.oidc.subtitle - dataverse.auth.oidc.pkce.max-cache-size - dataverse.auth.oidc.pkce.max-cache-age +- dataverse.files.{driverId}.upload-out-of-band +- dataverse.files.guestbook-at-request ## Installation @@ -182,14 +184,17 @@ If this is a new installation, please follow our [Installation Guide](https://gu Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! -You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC). +You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC). ## Upgrade Instructions - Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. +## Backward Incompatibilities +- Since Alternative Title is repeatable now, old json apis would not be compatible with a new version +- Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility, + ## Complete List of Changes For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub. From 90ff56ca979cd71f1c467ff1cfa0dfeb8f619691 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 6 Dec 2023 12:43:43 -0500 Subject: [PATCH 0418/1112] Update doc/release-notes/6.1-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.1-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 4b5c20f3953..e1a9214a982 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -122,7 +122,7 @@ Please see the "Installing Solr" section of the Installation Prerequisites guide ### Development -- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. +- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools - There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews - A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. 
From 10e0e25fe10dda9f49b6126f591b9483adb2f765 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 6 Dec 2023 12:44:49 -0500 Subject: [PATCH 0419/1112] Update doc/release-notes/6.1-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.1-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index e1a9214a982..427a07a4c2c 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -123,7 +123,7 @@ Please see the "Installing Solr" section of the Installation Prerequisites guide ### Development - Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. -For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools +For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools and [the thread](https://groups.google.com/g/dataverse-community/c/zNBDzSMF2Q0/m/Z-xS6fA2BgAJ) on the mailing list. - There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews - A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. From 3d55ed31de8fb9e45a2cedfecf07e22c82dae12a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 6 Dec 2023 12:47:53 -0500 Subject: [PATCH 0420/1112] adding review comment changes --- doc/release-notes/6.1-release-notes.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 427a07a4c2c..189f21f2322 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -19,21 +19,22 @@ With the upload-out-of-band option enabled, it is also possible for file upload ### Alternative Title is made repeatable. - One will need to update database with updated citation block. `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` -- One will also need to update solr schema: - Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml` - Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` +- One will also need to update Solr schema: + Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml` + Reload Solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` -Since Alternative Title is repeatable now, old json apis would not be compatible with a new version since value of alternative title has changed from simple string to an array. 
+Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version since value of alternative title has changed from simple string to an array. For example, instead "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] -### Improvements in the /versions API +### Improvements in the dataset versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions - a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output - when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. -### The following API endpoints have been added: +### The following API endpoints have been added: +- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version). - /api/files/{id}/downloadCount - /api/files/{id}/dataTables - /api/files/{id}/metadata/tabularTags New endpoint to set tabular file tags. @@ -42,11 +43,10 @@ This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/ - setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. - userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. - hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. -- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version). - getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes. - getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. - getDatasetJsonSchema (/api/dataverses/{id}/datasetSchema): Get a dataset schema with the fields required by a given dataverse collection. -- validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset json file is in proper format and contains the required elements and fields for a given dataverse collection. +- validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset JSON file is in proper format and contains the required elements and fields for a given dataverse collection. ### Extended the existing endpoints: - getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. 
Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. @@ -113,7 +113,7 @@ to generate updated versions. - We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html See also #10060. -- Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to json format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) +- Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) ### Solr Improvements - As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. @@ -192,7 +192,7 @@ Upgrading requires a maintenance window and downtime. Please plan ahead, create These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. ## Backward Incompatibilities -- Since Alternative Title is repeatable now, old json apis would not be compatible with a new version +- Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version - Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility, ## Complete List of Changes From 1be5d4b6b2baddc5f30bf598d81bd5ed991f73ee Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 6 Dec 2023 12:52:39 -0500 Subject: [PATCH 0421/1112] adding review comment changes --- doc/release-notes/6.1-release-notes.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 189f21f2322..d0fe895565c 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -87,10 +87,8 @@ This parameter applies a filter criteria to the operation and supports the follo ### Misc - Configure tools are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. - - Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). 
The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. - - Dataverse's OAI_ORE Metadata Export format and archival BagIT exports (which include the OAI-ORE metadata export file) have been updated to include information about the dataset version state, e.g. RELEASED or DEACCESSIONED @@ -104,7 +102,7 @@ Dataverse installations that have been using archival Bags may wish to update an existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse [archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) to generate updated versions. - +- There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews - This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. - To fix #9952, we surround the license info with `<` and `>`. - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information @@ -124,7 +122,6 @@ Please see the "Installing Solr" section of the Installation Prerequisites guide ### Development - Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools and [the thread](https://groups.google.com/g/dataverse-community/c/zNBDzSMF2Q0/m/Z-xS6fA2BgAJ) on the mailing list. -- There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews - A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. - Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. 
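As a concrete illustration of the guestbook-at-request feature described in the release-note changes above, here is a minimal sketch of how an installation might adopt it. The JVM option name comes from the notes themselves; the per-dataset override endpoint in the second command is an assumption based on the "superuser using the API" wording and should be verified against the final API guide.

```
# Make "guestbook at request time" the instance-wide default (option name from the release notes above).
$PAYARA/bin/asadmin create-jvm-options '-Ddataverse.files.guestbook-at-request=true'

# Hypothetical superuser call switching a single dataset (id 42) back to guestbook-at-download;
# the endpoint path is an assumption, not confirmed by this patch series.
curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X PUT \
  "$SERVER_URL/api/datasets/42/guestbookEntryAtRequest" -d false
```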
From 8e2ff826bdd0f41e598a56012fa780d5f9148a2e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 13:41:35 -0500 Subject: [PATCH 0422/1112] store tests --- .../dataaccess/GlobusOverlayAccessIOTest.java | 148 ++++++++++++++++++ 1 file changed, 148 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index e69de29bb2d..792a9974076 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -0,0 +1,148 @@ +/* + * SPDX-License-Identifier: Apache 2.0 + */ +package edu.harvard.iq.dataverse.dataaccess; + +import edu.harvard.iq.dataverse.DOIServiceBean; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.mocks.MocksFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import static org.junit.jupiter.api.Assertions.*; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import java.io.IOException; +import java.nio.file.Paths; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +public class GlobusOverlayAccessIOTest { + + @Mock + + private Dataset dataset; + private DataFile mDatafile; + private DataFile rDatafile; + private String baseStoreId1 = "182ad2bda2f-c3508e719076"; + private String baseStoreId2 = "182ad2bda2f-c3508e719077"; + private String logoPath = "d7c42580-6538-4605-9ad8-116a61982644/hdc1/image002.mrc"; + private String authority = "10.5072"; + private String identifier = "F2ABCDEF"; + + @BeforeEach + public void setUp() { + // Base Store + System.setProperty("dataverse.files.base.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + System.setProperty("dataverse.files.base.label", "default"); + System.setProperty("dataverse.files.base.directory", "/tmp/files"); + + // Managed Globus Store + + // Nonsense endpoint/paths + System.setProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH, + "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); + // Nonsense value of the right form + System.setProperty("dataverse.files.globusm.globus-token", + "NzM2NTQxMDMtOTg1Yy00NDgzLWE1MTYtYTJlNDk0ZmI3MDhkOkpJZGZaZGxMZStQNUo3MTRIMDY2cDh6YzIrOXI2RmMrbFR6UG0zcSsycjA9"); + System.setProperty("dataverse.files.globusm.remote-store-name", "GlobusEndpoint1"); + System.setProperty("dataverse.files.globusm.type", "globus"); + System.setProperty("dataverse.files.globusm.managed", "true"); + System.setProperty("dataverse.files.globusm.base-store", "base"); + System.setProperty("dataverse.files.globusm.label", "globusManaged"); + + // Remote Store + System.setProperty("dataverse.files.globusr.type", "globus"); + System.setProperty("dataverse.files.globusr.base-store", "base"); + System.setProperty("dataverse.files.globusr.managed", "false"); + System.setProperty("dataverse.files.globusm.label", "globusRemote"); + System.setProperty( + "dataverse.files.globusr." 
+ AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS, + "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); + System.setProperty("dataverse.files.globusr.remote-store-name", "DemoDataCorp"); + dataset = MocksFactory.makeDataset(); + dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", + DOIServiceBean.DOI_RESOLVER_URL, null)); + mDatafile = MocksFactory.makeDataFile(); + mDatafile.setOwner(dataset); + mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); + + rDatafile = MocksFactory.makeDataFile(); + rDatafile.setOwner(dataset); + rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath); + } + + @AfterEach + public void tearDown() { + System.clearProperty("dataverse.files.base.type"); + System.clearProperty("dataverse.files.base.label"); + System.clearProperty("dataverse.files.base.directory"); + System.clearProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH); + System.clearProperty("dataverse.files.globusm.globus-token"); + System.clearProperty("dataverse.files.globusm.remote-store-name"); + System.clearProperty("dataverse.files.globusm.type"); + System.clearProperty("dataverse.files.globusm.managed"); + System.clearProperty("dataverse.files.globusm.base-store"); + System.clearProperty("dataverse.files.globusm.label"); + System.clearProperty("dataverse.files.globusr.type"); + System.clearProperty("dataverse.files.globusr.base-store"); + System.clearProperty("dataverse.files.globusr.managed"); + System.clearProperty("dataverse.files.globusm.label"); + System.clearProperty( + "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS); + System.clearProperty("dataverse.files.globusr.remote-store-name"); + } + + @Test + void testGlobusOverlayIdentifiers() throws IOException { + assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusm", mDatafile.getStorageIdentifier())); + assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusr", rDatafile.getStorageIdentifier())); + assertFalse(GlobusOverlayAccessIO.isValidIdentifier("globusm", "globusr://localid//../of/the/hill")); + assertFalse(GlobusOverlayAccessIO.isValidIdentifier("globusr", + rDatafile.getStorageIdentifier().replace("hdc1", ""))); + + // We can read the storageIdentifier and get the driver + assertTrue(mDatafile.getStorageIdentifier() + .startsWith(DataAccess.getStorageDriverFromIdentifier(mDatafile.getStorageIdentifier()))); + assertTrue(rDatafile.getStorageIdentifier() + .startsWith(DataAccess.getStorageDriverFromIdentifier(rDatafile.getStorageIdentifier()))); + + // We can get the driver type from it's ID + assertTrue(DataAccess.getDriverType("globusm").equals(System.getProperty("dataverse.files.globusm.type"))); + assertTrue(DataAccess.getDriverType("globusr").equals(System.getProperty("dataverse.files.globusr.type"))); + + // When we get a StorageIO for the file, it is the right type + StorageIO mStorageIO = DataAccess.getStorageIO(mDatafile); + assertTrue(mStorageIO instanceof GlobusOverlayAccessIO); + StorageIO rStorageIO = DataAccess.getStorageIO(rDatafile); + assertTrue(rStorageIO instanceof GlobusOverlayAccessIO); + + // When we use it, we can get properties like the remote store name + assertTrue(mStorageIO.getRemoteStoreName() + .equals(System.getProperty("dataverse.files.globusm.remote-store-name"))); + assertTrue(rStorageIO.getRemoteStoreName() + .equals(System.getProperty("dataverse.files.globusr.remote-store-name"))); + + // Storage Locations are correct + String 
mLocation = mStorageIO.getStorageLocation(); + assertEquals("globusm:///" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + + "/" + baseStoreId1, mLocation); + String rLocation = rStorageIO.getStorageLocation(); + assertEquals("globusr://" + baseStoreId2 + "//" + logoPath, rLocation); + + // If we ask for the path for an aux file, it is correct + System.out.println(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, + identifier, baseStoreId1 + ".auxobject").toString()); + System.out.println(mStorageIO.getAuxObjectAsPath("auxobject").toString()); + assertTrue(Paths.get(System.getProperty("dataverse.files.base.directory", "/tmp/files"), authority, identifier, + baseStoreId1 + ".auxobject").equals(mStorageIO.getAuxObjectAsPath("auxobject"))); + assertTrue(Paths.get(System.getProperty("dataverse.files.base.directory", "/tmp/files"), authority, identifier, + baseStoreId2 + ".auxobject").equals(rStorageIO.getAuxObjectAsPath("auxobject"))); + } +} From 865c9feb4230a0a3bc9880cb6088a563b3fe21fc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 13:53:35 -0500 Subject: [PATCH 0423/1112] getConfig tests --- .../iq/dataverse/dataaccess/StorageIOTest.java | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java index 2ed9d18036d..84a241b90f6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java @@ -243,4 +243,16 @@ public void testGenerateVariableHeader() { assertEquals("Random Random\n", instance.generateVariableHeader(dvs)); assertEquals(null, instance.generateVariableHeader(null)); } + + @Test + public void testGetConfigParam() { + System.setProperty("dataverse.files.globus.type", "globus"); + assertEquals("globus", StorageIO.getConfigParamForDriver("globus", StorageIO.TYPE)); + System.clearProperty("dataverse.files.globus.type"); + } + + @Test + public void testGetConfigParamWithDefault() { + assertEquals(DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER, StorageIO.getConfigParamForDriver("globus", AbstractRemoteOverlayAccessIO.BASE_STORE, DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER)); + } } From cb1beaae490126c2274219dfcb4cae56094b096a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Dec 2023 14:11:15 -0500 Subject: [PATCH 0424/1112] finish changing minio secret key #6783 This should have been part of 811d79a7 --- docker-compose-dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index e68215d53d2..5265a6b7c2d 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -42,7 +42,7 @@ services: -Ddataverse.files.minio1.upload-redirect=false -Ddataverse.files.minio1.download-redirect=false -Ddataverse.files.minio1.access-key=4cc355_k3y - -Ddataverse.files.minio1.secret-key=s3cr3t_4cc355_k35 + -Ddataverse.files.minio1.secret-key=s3cr3t_4cc355_k3y ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From 5b7a560a380db12d083e82a19a865eb79559e0a4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 14:41:31 -0500 Subject: [PATCH 0425/1112] refactor, test for getFileMap --- .../harvard/iq/dataverse/api/Datasets.java | 3 +- .../AbstractRemoteOverlayAccessIO.java | 2 +- .../dataverse/globus/GlobusServiceBean.java | 134 +++++++++--------- 
.../iq/dataverse/globus/GlobusUtil.java | 33 +++++ .../dataaccess/GlobusOverlayAccessIOTest.java | 1 - .../iq/dataverse/globus/GlobusUtilTest.java | 88 ++++++++++++ 6 files changed, 190 insertions(+), 71 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 939ebf1dcd4..b3bfc476423 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -110,6 +110,7 @@ import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.globus.GlobusUtil; import java.io.IOException; import java.io.InputStream; @@ -3996,7 +3997,7 @@ public Response requestGlobusDownload(@Context ContainerRequestContext crc, @Pat } } // Allowed to download all requested files - JsonObject files = globusService.getFilesMap(dataFiles, dataset); + JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset); if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) { // If managed, give the principal read permissions int status = globusService.setPermissionForDownload(dataset, body.getString("principal")); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java index 8d058b7c9e3..6c26502acfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java @@ -42,7 +42,7 @@ public abstract class AbstractRemoteOverlayAccessIO extends StorageIO { protected static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); - protected static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths"; + public static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths"; static final String BASE_STORE = "base-store"; protected static final String SECRET_KEY = "secret-key"; static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes"; diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 37959188857..8cc8e491416 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -159,9 +159,11 @@ public void deletePermission(String ruleId, Dataset dataset, Logger globusLogger } } - /** Request read/write access for the specified principal and generate a list of accessible paths for new files for the specified dataset. + /** + * Request read/write access for the specified principal and generate a list of + * accessible paths for new files for the specified dataset. 
* - * @param principal - the id of the Globus principal doing the transfer + * @param principal - the id of the Globus principal doing the transfer * @param dataset * @param numberOfPaths - how many files are to be transferred * @return @@ -230,10 +232,15 @@ private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissi } } - /** Given an array of remote files to be referenced in the dataset, create a set of valid storage identifiers and return a map of the remote file paths to storage identifiers. + /** + * Given an array of remote files to be referenced in the dataset, create a set + * of valid storage identifiers and return a map of the remote file paths to + * storage identifiers. * * @param dataset - * @param referencedFiles - a JSON array of remote files to be referenced in the dataset - each should be a string with the /path/to/file + * @param referencedFiles - a JSON array of remote files to be referenced in the + * dataset - each should be a string with the /path/to/file * @return - a map of supplied paths to valid storage identifiers */ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray referencedFiles) { @@ -262,15 +269,17 @@ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray ref return fileMap.build(); } - - /** A cache of temporary permission requests - for upload (rw) and download (r) access. - * When a temporary permission request is created, it is added to the cache. After GLOBUS_CACHE_MAXAGE minutes, if a transfer has not been started, the permission will be revoked/deleted. - * (If a transfer has been started, the permission will not be revoked/deleted until the transfer is complete. This is handled in other methods.) + /** + * A cache of temporary permission requests - for upload (rw) and download (r) + * access. When a temporary permission request is created, it is added to the + * cache. After GLOBUS_CACHE_MAXAGE minutes, if a transfer has not been started, + * the permission will be revoked/deleted. (If a transfer has been started, the + * permission will not be revoked/deleted until the transfer is complete. This + * is handled in other methods.) */ // Single cache of open rules/permission requests private final Cache rulesCache = Caffeine.newBuilder() - .expireAfterWrite( - Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) + .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) .scheduler(Scheduler.systemScheduler()).evictionListener((ruleId, datasetId, cause) -> { // Delete rules that expire logger.fine("Rule " + ruleId + " expired"); @@ -280,20 +289,24 @@ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray ref .build(); - //Convenience method to add a temporary permission request to the cache - allows logging of temporary permission requests + // Convenience method to add a temporary permission request to the cache - + // allows logging of temporary permission requests private void monitorTemporaryPermissions(String ruleId, long datasetId) { logger.fine("Adding rule " + ruleId + " for dataset " + datasetId); rulesCache.put(ruleId, datasetId); } -/** Call the Globus API to get info about the transfer. 
- * - * @param accessToken - * @param taskId - the Globus task id supplied by the user - * @param globusLogger - the transaction-specific logger to use (separate log files are created in general, some calls may use the class logger) - * @return - * @throws MalformedURLException - */ + /** + * Call the Globus API to get info about the transfer. + * + * @param accessToken + * @param taskId - the Globus task id supplied by the user + * @param globusLogger - the transaction-specific logger to use (separate log + * files are created in general, some calls may use the + * class logger) + * @return + * @throws MalformedURLException + */ public GlobusTask getTask(String accessToken, String taskId, Logger globusLogger) throws MalformedURLException { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId); @@ -313,9 +326,12 @@ public GlobusTask getTask(String accessToken, String taskId, Logger globusLogger return task; } - /** Globus call to get an access token for the user using the long-term token we hold. + /** + * Globus call to get an access token for the user using the long-term token we + * hold. * - * @param globusBasicToken - the base64 encoded Globus Basic token comprised of the : + * @param globusBasicToken - the base64 encoded Globus Basic token comprised of + * the : * @return - a valid Globus access token */ public static AccessToken getClientToken(String globusBasicToken) { @@ -433,7 +449,6 @@ static class MakeRequestResponse { } - /** * Cache of open download Requests This cache keeps track of the set of files * selected for transfer out (download) via Globus. It is a means of @@ -480,10 +495,11 @@ public String getGlobusAppUrlForDataset(Dataset d) { return getGlobusAppUrlForDataset(d, true, null); } - /** Generated the App URl for upload (in) or download (out) + /** + * Generated the App URl for upload (in) or download (out) * - * @param d - the dataset involved - * @param upload - boolean, true for upload, false for download + * @param d - the dataset involved + * @param upload - boolean, true for upload, false for download * @param dataFiles - a list of the DataFiles to be downloaded * @return */ @@ -516,7 +532,7 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List downloadDFList) { return URLTokenUtil.getScriptForUrl(getGlobusAppUrlForDataset(dataset, false, downloadDFList)); - } @Asynchronous @@ -608,8 +605,8 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S rulesCache.invalidate(ruleId); } } - - //Wait before first check + + // Wait before first check Thread.sleep(5000); // globus task status check task = globusStatusCheck(endpoint, taskIdentifier, globusLogger); @@ -907,8 +904,8 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro } task = globusStatusCheck(endpoint, taskIdentifier, globusLogger); String taskStatus = getTaskStatus(task); - - //Transfer is done (success or failure) so delete the rule + + // Transfer is done (success or failure) so delete the rule if (ruleId != null) { logger.info("Deleting: rule: " + ruleId); deletePermission(ruleId, dataset, globusLogger); @@ -1150,13 +1147,14 @@ private GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { return endpoint; } - + // This helper method is called from the Download terms/guestbook/etc. popup, // when the user clicks the "ok" button. 
We use it, instead of calling // downloadServiceBean directly, in order to differentiate between single // file downloads and multiple (batch) downloads - since both use the same // terms/etc. popup. - public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, boolean doNotSaveGuestbookResponse) { + public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, + boolean doNotSaveGuestbookResponse) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); @@ -1170,7 +1168,7 @@ public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, apiToken = new ApiToken(); apiToken.setTokenString(privUrl.getToken()); } - + DataFile df = guestbookResponse.getDataFile(); if (df != null) { logger.fine("Single datafile case for writeGuestbookAndStartTransfer"); @@ -1179,35 +1177,35 @@ public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse, if (!doNotSaveGuestbookResponse) { fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } - PrimeFaces.current() - .executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, downloadDFList)); + PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, downloadDFList)); } else { - //Following FileDownloadServiceBean writeGuestbookAndStartBatchDownload + // Following FileDownloadServiceBean writeGuestbookAndStartBatchDownload List list = new ArrayList<>(Arrays.asList(guestbookResponse.getSelectedFileIds().split(","))); List selectedFiles = new ArrayList(); for (String idAsString : list) { try { Long fileId = Long.parseLong(idAsString); - // If we need to create a GuestBookResponse record, we have to - // look up the DataFile object for this file: - if (!doNotSaveGuestbookResponse) { - df = dataFileService.findCheapAndEasy(fileId); - guestbookResponse.setDataFile(df); - fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); - selectedFiles.add(df); - } + // If we need to create a GuestBookResponse record, we have to + // look up the DataFile object for this file: + if (!doNotSaveGuestbookResponse) { + df = dataFileService.findCheapAndEasy(fileId); + guestbookResponse.setDataFile(df); + fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); + selectedFiles.add(df); + } } catch (NumberFormatException nfe) { - logger.warning("A file id passed to the writeGuestbookAndStartTransfer method as a string could not be converted back to Long: " + idAsString); + logger.warning( + "A file id passed to the writeGuestbookAndStartTransfer method as a string could not be converted back to Long: " + + idAsString); return; } } if (!selectedFiles.isEmpty()) { - //Use dataset from one file - files should all be from the same dataset - PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, - selectedFiles)); + // Use dataset from one file - files should all be from the same dataset + PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, selectedFiles)); } } - } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java new file mode 100644 index 00000000000..92cf8ac7704 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java @@ -0,0 +1,33 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.List; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; 
+import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; + +public class GlobusUtil { + + public static JsonObject getFilesMap(List dataFiles, Dataset d) { + JsonObjectBuilder filesBuilder = Json.createObjectBuilder(); + for (DataFile df : dataFiles) { + String storageId = df.getStorageIdentifier(); + String[] parts = DataAccess + .getDriverIdAndStorageLocation(DataAccess.getLocationFromStorageId(storageId, d)); + String driverId = parts[0]; + String fileLocation = parts[1]; + if (GlobusAccessibleStore.isDataverseManaged(driverId)) { + String endpointWithBasePath = GlobusAccessibleStore.getTransferEnpointWithPath(driverId); + fileLocation = endpointWithBasePath + "/" + fileLocation; + } else { + fileLocation = storageId.substring(storageId.lastIndexOf("//") + 2); + } + filesBuilder.add(df.getId().toString(), fileLocation); + } + return filesBuilder.build(); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index 792a9974076..856d71d7dc0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.mocks.MocksFactory; import org.junit.jupiter.api.AfterEach; diff --git a/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java new file mode 100644 index 00000000000..56f8731b9c8 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java @@ -0,0 +1,88 @@ +package edu.harvard.iq.dataverse.globus; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.mock; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.Mockito; + +import edu.harvard.iq.dataverse.DOIServiceBean; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; +import edu.harvard.iq.dataverse.mocks.MocksFactory; +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import jakarta.json.JsonObject; + +public class GlobusUtilTest { + + private Dataset dataset; + private DataFile mDatafile; + private DataFile rDatafile; + private String baseStoreId1 = "182ad2bda2f-c3508e719076"; + private String baseStoreId2 = "182ad2bda2f-c3508e719077"; + private String logoPath = "d7c42580-6538-4605-9ad8-116a61982644/hdc1/image002.mrc"; + private String authority = "10.5072"; + private String identifier = "F2ABCDEF"; + + @BeforeEach + public void setUp() { + + // Managed Globus Store + + // Nonsense endpoint/paths + System.setProperty("dataverse.files.globusm." 
+ GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH, + "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); + System.setProperty("dataverse.files.globusm.managed", "true"); + + // Remote Store + System.setProperty("dataverse.files.globusr.managed", "false"); + System.setProperty( + "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS, + "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); + + dataset = MocksFactory.makeDataset(); + dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", + DOIServiceBean.DOI_RESOLVER_URL, null)); + mDatafile = MocksFactory.makeDataFile(); + mDatafile.setOwner(dataset); + mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); + + rDatafile = MocksFactory.makeDataFile(); + rDatafile.setOwner(dataset); + rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath); + List files = new ArrayList(); + files.add(mDatafile); + files.add(rDatafile); + dataset.setFiles(files); + } + + @AfterEach + public void tearDown() { + System.clearProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH); + System.clearProperty("dataverse.files.globusm.managed"); + System.clearProperty("dataverse.files.globusr.managed"); + System.clearProperty( + "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS); + } + + + @Test + public void testgetFilesMap() { + + JsonObject jo = GlobusUtil.getFilesMap(dataset.getFiles(), dataset); + System.out.println(JsonUtil.prettyPrint(jo)); + assertEquals(jo.getString(Long.toString(mDatafile.getId())), "d7c42580-6538-4605-9ad8-116a61982644/hdc1/10.5072/F2ABCDEF/182ad2bda2f-c3508e719076"); + assertEquals(jo.getString(Long.toString(rDatafile.getId())), logoPath); + } +} From 4ba629d643678acdd0b649128b8a76a805ee6906 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 6 Dec 2023 15:28:32 -0500 Subject: [PATCH 0426/1112] adding review comment changes --- doc/release-notes/6.1-release-notes.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index d0fe895565c..38b99e6580b 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -11,6 +11,10 @@ This release contains major upgrades to core components. Detailed upgrade instru ## Detailed Release Highlights, New Features and Use Case Scenarios +### Optional support for guestbooks to appear when files access is requested rather than after access has been granted and a download is started +Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). + The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. + ### Dataverse installation can be now be configured to allow out-of-band upload In some situations, direct upload might not work from the UI, e.g., when s3 storage is not accessible from the internet. This pull request adds an option to [allow direct uploads via API only](https://github.com/IQSS/dataverse/pull/9003). 
This way, a third party application can use direct upload from within the internal network, while there is no direct download available to the users via UI. By default, Dataverse supports uploading files via the [add a file to a dataset](https://guides.dataverse.org/en/6.1/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). @@ -142,7 +146,7 @@ life easier during instance setups and reconfiguration. You no longer need to ge necessary JSON file. ### Adding PKCE Support - +[This PR adds PKCE support for OIDC providers](https://github.com/IQSS/dataverse/pull/9273) Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) From 93d9b35a07625622523a4490eee8f55d617defec Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 16:32:17 -0500 Subject: [PATCH 0427/1112] future test code - requires config of Globus stores --- .../harvard/iq/dataverse/api/DatasetsIT.java | 53 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 32 +++++++++++ .../dataaccess/GlobusOverlayAccessIOTest.java | 34 ++++++------ 3 files changed, 104 insertions(+), 15 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6a746b7c5b5..928574eb82b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -42,6 +42,9 @@ import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; +import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO; +import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIOTest; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import org.apache.commons.lang3.StringUtils; @@ -135,6 +138,7 @@ public static void setUpClass() { .statusCode(200); */ } + @AfterAll public static void afterClass() { @@ -4175,4 +4179,53 @@ public void testGetUserPermissionsOnDataset() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + //Requires that a Globus remote store be set up as with the parameters in the GlobusOverlayAccessIOTest class + //Tests whether the API call succeeds and has some of the expected parameters + @Test + @Disabled + public void testGetGlobusUploadParameters() { + //Creates managed and remote Globus stores + GlobusOverlayAccessIOTest.setUp(); + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String username = UtilIT.getUsernameFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + 
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response setDriver = UtilIT.setDatasetStorageDriver(datasetId, System.getProperty("dataverse.files.globusr.label"), apiToken); + assertEquals(200, setDriver.getStatusCode()); + + Response getUploadParams = UtilIT.getDatasetGlobusUploadParameters(datasetId, "en_us", apiToken); + assertEquals(200, getUploadParams.getStatusCode()); + JsonObject data = JsonUtil.getJsonObject(getUploadParams.getBody().asString()); + JsonObject queryParams = data.getJsonObject("queryParameters"); + assertEquals("en_us", queryParams.getString("dvLocale")); + assertEquals("false", queryParams.getString("managed")); + //Assumes only one reference endpoint with a basepath is configured + assertTrue(queryParams.getJsonArray("referenceEndpointsWithPaths").get(0).toString().indexOf(System.getProperty("dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS)) > -1); + JsonArray signedUrls = data.getJsonArray("signedUrls"); + boolean found = false; + for (int i = 0; i < signedUrls.size(); i++) { + JsonObject signedUrl = signedUrls.getJsonObject(i); + if (signedUrl.getString("name").equals("requestGlobusReferencePaths")) { + found=true; + break; + } + } + assertTrue(found); + //Removes managed and remote Globus stores + GlobusOverlayAccessIOTest.tearDown(); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 869e755a183..bd2fe7e6f0b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3636,4 +3636,36 @@ static Response downloadTmpFile(String fullyQualifiedPathToFile, String apiToken .get("/api/admin/downloadTmpFile?fullyQualifiedPathToFile=" + fullyQualifiedPathToFile); } + static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(driverLabel) + .put("/api/datasets/" + datasetId + "/storageDriver"); + } + + + //Globus Store related - not currently used + + static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/datasets/" + datasetId + "/globusUploadParameters?locale=" + locale); + } + + static Response getDatasetGlobusDownloadParameters(Integer datasetId, String locale, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/datasets/" + datasetId + "/globusDownloadParameters?locale=" + locale); + } + + static Response requestGlobusDownload(Integer datasetId, JsonObject body, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(body) + .contentType("application/json") + .post("/api/datasets/" + datasetId + "/requestGlobusDownload"); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index 856d71d7dc0..1c84fa90a9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -8,8 +8,9 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.mocks.MocksFactory; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import static org.junit.jupiter.api.Assertions.*; @@ -35,8 +36,8 @@ public class GlobusOverlayAccessIOTest { private String authority = "10.5072"; private String identifier = "F2ABCDEF"; - @BeforeEach - public void setUp() { + @BeforeAll + public static void setUp() { // Base Store System.setProperty("dataverse.files.base.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); System.setProperty("dataverse.files.base.label", "default"); @@ -65,20 +66,11 @@ public void setUp() { "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS, "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); System.setProperty("dataverse.files.globusr.remote-store-name", "DemoDataCorp"); - dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", - DOIServiceBean.DOI_RESOLVER_URL, null)); - mDatafile = MocksFactory.makeDataFile(); - mDatafile.setOwner(dataset); - mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); - rDatafile = MocksFactory.makeDataFile(); - rDatafile.setOwner(dataset); - rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath); } - @AfterEach - public void tearDown() { + @AfterAll + public static void tearDown() { System.clearProperty("dataverse.files.base.type"); System.clearProperty("dataverse.files.base.label"); System.clearProperty("dataverse.files.base.directory"); @@ -100,6 +92,18 @@ public void tearDown() { @Test void testGlobusOverlayIdentifiers() throws IOException { + + dataset = MocksFactory.makeDataset(); + dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", + DOIServiceBean.DOI_RESOLVER_URL, null)); + mDatafile = MocksFactory.makeDataFile(); + mDatafile.setOwner(dataset); + mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); + + rDatafile = MocksFactory.makeDataFile(); + rDatafile.setOwner(dataset); + rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath); + assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusm", mDatafile.getStorageIdentifier())); assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusr", rDatafile.getStorageIdentifier())); assertFalse(GlobusOverlayAccessIO.isValidIdentifier("globusm", "globusr://localid//../of/the/hill")); From 12b7c306dd31ebd987a2bae5f36dae27e4f0ba56 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 16:32:24 -0500 Subject: [PATCH 0428/1112] typo --- .../iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index 1c84fa90a9e..ad980aa28cd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -61,7 +61,7 @@ public static void setUp() { System.setProperty("dataverse.files.globusr.type", "globus"); 
System.setProperty("dataverse.files.globusr.base-store", "base"); System.setProperty("dataverse.files.globusr.managed", "false"); - System.setProperty("dataverse.files.globusm.label", "globusRemote"); + System.setProperty("dataverse.files.globusr.label", "globusRemote"); System.setProperty( "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS, "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); From 1426dfb6fc52ace869e3c822a732d5b408ca7c4c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Dec 2023 16:47:54 -0500 Subject: [PATCH 0429/1112] add missing setting to release notes, add a todo to use two delays --- doc/release-notes/10162-globus-support.md | 7 ++++++- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 2 ++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/10162-globus-support.md b/doc/release-notes/10162-globus-support.md index d64e72b70a1..7bc3990f840 100644 --- a/doc/release-notes/10162-globus-support.md +++ b/doc/release-notes/10162-globus-support.md @@ -1,4 +1,6 @@ -Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, and for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/latest/developers/big-data-support.html#big-data-support) +Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, +and for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. +Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/latest/developers/big-data-support.html#big-data-support) - Globus functionality remains 'experimental'/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incomatibilities. - The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model - Adding files to a dataset, and accessing files is supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus) @@ -10,5 +12,8 @@ Backward Incompatibilities: New JVM Options: - A new 'globus' store type and associated store-related options have been added. These are described in the [File Storage Options section of the Dataverse Guides](https://guides.dataverse.org/en/latest/installation/config.html#file-storage-using-a-local-filesystem-and-or-swift-and-or-object-stores-and-or-trusted-remote-stores). +- dataverse.files.globus-cache-maxage - specifies the number of minutes Dataverse will wait between an initial request for a file transfer occurs and when that transfer must begin. 
+ + Obsolete Settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 8cc8e491416..d0660a55a6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -277,6 +277,8 @@ public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray ref * permission will not be revoked/deleted until the transfer is complete. This * is handled in other methods.) */ + // ToDo - nominally this doesn't need to be as long as the allowed time for the + // downloadCache so there could be two separate settings. // Single cache of open rules/permission requests private final Cache rulesCache = Caffeine.newBuilder() .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) From d2427bd39046f104c95e27d1869d1665b969724f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Dec 2023 09:49:52 -0500 Subject: [PATCH 0430/1112] #10151 incorporate recent additions --- doc/release-notes/6.1-release-notes.md | 22 +++++++++++++++++++++ doc/release-notes/8549-collection-quotas.md | 3 --- doc/release-notes/8760-bagit.md | 15 -------------- 3 files changed, 22 insertions(+), 18 deletions(-) delete mode 100644 doc/release-notes/8549-collection-quotas.md delete mode 100644 doc/release-notes/8760-bagit.md diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 38b99e6580b..38a7a1064e6 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -30,6 +30,28 @@ With the upload-out-of-band option enabled, it is also possible for file upload Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version since value of alternative title has changed from simple string to an array. For example, instead "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] +### Collection Storage Size Quota Support +-This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. +Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the first, initial deployment of 6.1 + +### BagIT Export Configurations Updated +For BagIT export, it is now possible to configure the following information in bag-info.txt... + +Source-Organization: Harvard Dataverse +Organization-Address: 1737 Cambridge Street, Cambridge, MA, USA +Organization-Email: support@dataverse.harvard.edu + +... using new JVM/MPCONFIG options: + +- dataverse.bagit.sourceorg.name +- dataverse.bagit.sourceorg.address +- dataverse.bagit.sourceorg.email + +Previously, customization was possible by editing `Bundle.properties` but this is no longer supported. 
+ +For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt + + ### Improvements in the dataset versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions - a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output diff --git a/doc/release-notes/8549-collection-quotas.md b/doc/release-notes/8549-collection-quotas.md deleted file mode 100644 index b3635d0c5a1..00000000000 --- a/doc/release-notes/8549-collection-quotas.md +++ /dev/null @@ -1,3 +0,0 @@ -This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. -Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the first, initial deployment of 6.1 - diff --git a/doc/release-notes/8760-bagit.md b/doc/release-notes/8760-bagit.md deleted file mode 100644 index 30601857309..00000000000 --- a/doc/release-notes/8760-bagit.md +++ /dev/null @@ -1,15 +0,0 @@ -For BagIT export, it is now possible to configure the following information in bag-info.txt... - -Source-Organization: Harvard Dataverse -Organization-Address: 1737 Cambridge Street, Cambridge, MA, USA -Organization-Email: support@dataverse.harvard.edu - -... using new JVM/MPCONFIG options: - -- dataverse.bagit.sourceorg.name -- dataverse.bagit.sourceorg.address -- dataverse.bagit.sourceorg.email - -Previously, customization was possible by editing `Bundle.properties` but this is no longer supported. - -For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt From 05c53066ea26c809b6376051ff336f11a4bcee9d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Dec 2023 10:29:47 -0500 Subject: [PATCH 0431/1112] mention download tmp file API #10151 --- doc/release-notes/6.1-release-notes.md | 1 + doc/release-notes/8760-download-tmp-file.md | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) delete mode 100644 doc/release-notes/8760-download-tmp-file.md diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 38a7a1064e6..1b4e884cded 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -73,6 +73,7 @@ This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/ - getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. - getDatasetJsonSchema (/api/dataverses/{id}/datasetSchema): Get a dataset schema with the fields required by a given dataverse collection. - validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset JSON file is in proper format and contains the required elements and fields for a given dataverse collection. 
+- downloadTmpFile (/api/admin/downloadTmpFile): For testing purposes, allows files to be downloaded from /tmp. ### Extended the existing endpoints: - getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. diff --git a/doc/release-notes/8760-download-tmp-file.md b/doc/release-notes/8760-download-tmp-file.md deleted file mode 100644 index 7623a91ac9a..00000000000 --- a/doc/release-notes/8760-download-tmp-file.md +++ /dev/null @@ -1,3 +0,0 @@ -A new API has been added for testing purposes that allows files to be downloaded from /tmp. - -See From 97c33218fa7224c544657e72f52c27d9cd8951bf Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Dec 2023 10:30:23 -0500 Subject: [PATCH 0432/1112] remove duplicate "new" heading in API changelog #10151 --- doc/sphinx-guides/source/api/changelog.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index d2908533a14..910134e14f3 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -12,9 +12,6 @@ New ~~~ - **/api/dataverses/{id}/datasetSchema**: See :ref:`get-dataset-json-schema`. - **/api/dataverses/{id}/validateDatasetJson**: See :ref:`validate-dataset-json`. - -New -~~~ - **/api/admin/clearThumbnailFailureFlag**: See :ref:`thumbnail_reset`. - **/api/admin/downloadTmpFile**: See :ref:`download-file-from-tmp`. From 3a13ac8c56385ed2cc82bcc9db4f57fea7688a67 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Dec 2023 10:34:21 -0500 Subject: [PATCH 0433/1112] #10151 add upgrade instructions --- doc/release-notes/6.1-release-notes.md | 81 +++++++++++++++++++ .../9002_allow_direct_upload_setting.md | 5 -- 2 files changed, 81 insertions(+), 5 deletions(-) delete mode 100644 doc/release-notes/9002_allow_direct_upload_setting.md diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 38a7a1064e6..d5972338124 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -51,6 +51,13 @@ Previously, customization was possible by editing `Bundle.properties` but this i For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt +### Direct Upload setting added +A Dataverse installation can be now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`. + +By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). 
+ +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. + ### Improvements in the dataset versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions @@ -138,6 +145,7 @@ to generate updated versions. See also #10060. - Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) +- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. (issue 9547). This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. ### Solr Improvements - As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. @@ -214,6 +222,79 @@ Upgrading requires a maintenance window and downtime. Please plan ahead, create These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. +0\. These instructions assume that you are upgrading from 6.0. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 5.14. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara6` + +(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin undeploy dataverse-6.0` + +2\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +3\. Start Payara + +- `service payara start` + +4\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-6.1.war` + +5\. Restart Payara + +- `service payara stop` +- `service payara start` + +6\. 
Update Geospatial Metadata Block (to improve validation of bounding box values) + +- `wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv` + +6a\. Update Citation Metadata Block (to make Alternative Title repeatable) + +- `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` + +7\. Upate Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). + +7a\. For installations without custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- Replace schema.xml + + - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + +- Start Solr instance (usually `service solr start`, depending on Solr/OS) + +7b\. For installations with custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): +``` + wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml +``` +OR, alternatively, you can edit the following line in your schema.xml by hand as follows (to indicate that alternative title is now `multiValued="true"`): +``` + +``` + +- Restart Solr instance (usually `service solr restart` depending on solr/OS) + +8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). + + ## Backward Incompatibilities - Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version - Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility, diff --git a/doc/release-notes/9002_allow_direct_upload_setting.md b/doc/release-notes/9002_allow_direct_upload_setting.md deleted file mode 100644 index 1e76ed4ad47..00000000000 --- a/doc/release-notes/9002_allow_direct_upload_setting.md +++ /dev/null @@ -1,5 +0,0 @@ -A Dataverse installation can be now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`. - -By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). 
- -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. From a78213633e6f5bf345d1aedf4328eee5ee231ffb Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Dec 2023 10:43:40 -0500 Subject: [PATCH 0434/1112] #10151 remove notes previously incorporated --- .../9547-validation-for-geospatial-metadata.md | 9 --------- doc/release-notes/9859-ORE and Bag updates.md | 14 -------------- 2 files changed, 23 deletions(-) delete mode 100644 doc/release-notes/9547-validation-for-geospatial-metadata.md delete mode 100644 doc/release-notes/9859-ORE and Bag updates.md diff --git a/doc/release-notes/9547-validation-for-geospatial-metadata.md b/doc/release-notes/9547-validation-for-geospatial-metadata.md deleted file mode 100644 index a44e1a3732b..00000000000 --- a/doc/release-notes/9547-validation-for-geospatial-metadata.md +++ /dev/null @@ -1,9 +0,0 @@ -Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. (issue 9547). This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. - -For the "upgrade" steps section: - -Update Geospatial Metadata Block - -- `wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv` - diff --git a/doc/release-notes/9859-ORE and Bag updates.md b/doc/release-notes/9859-ORE and Bag updates.md deleted file mode 100644 index dd3ae3bbbe1..00000000000 --- a/doc/release-notes/9859-ORE and Bag updates.md +++ /dev/null @@ -1,14 +0,0 @@ -Dataverse's OAI_ORE Metadata Export format and archival BagIT exports -(which include the OAI-ORE metadata export file) have been updated to include -information about the dataset version state, e.g. RELEASED or DEACCESSIONED -and to indicate which version of Dataverse was used to create the archival Bag. -As part of the latter, the current OAI_ORE Metadata format has been given a 1.0.0 -version designation and it is expected that any future changes to the OAI_ORE export -format will result in a version change and that tools such as DVUploader that can -recreate datasets from archival Bags will start indicating which version(s) of the -OAI_ORE format they can read. - -Dataverse installations that have been using archival Bags may wish to update any -existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse -[archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) -to generate updated versions. 
\ No newline at end of file From b517f6e0fca1802faa4455522a72e711963714ba Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Dec 2023 10:53:07 -0500 Subject: [PATCH 0435/1112] #10151 S3 test notes --- doc/release-notes/6.1-release-notes.md | 2 ++ doc/release-notes/6783-s3-tests.md | 3 --- 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 doc/release-notes/6783-s3-tests.md diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 9a35a31a734..375717ab9c9 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -163,6 +163,8 @@ For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.ht - `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. - As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. +- Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. API (end to end) tests are in S3AccessIT. +- In addition, a new integration test class (not an API test, the new Testcontainers-based test launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. ## OpenID Connect Authentication Provider Improvements diff --git a/doc/release-notes/6783-s3-tests.md b/doc/release-notes/6783-s3-tests.md deleted file mode 100644 index 1b9bb400cc6..00000000000 --- a/doc/release-notes/6783-s3-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. API (end to end) tests are in S3AccessIT. - -In addition, a new integration test class (not an API test, the new Testcontainers-based test launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. From 07a8659b60acdb766fb5a4742cf4ac4537e34615 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 8 Dec 2023 14:24:24 -0500 Subject: [PATCH 0436/1112] #10151 remove duplicate release note out of band setting previously added --- doc/release-notes/6.1-release-notes.md | 8 -------- 1 file changed, 8 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 375717ab9c9..b6bb7d8b806 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -51,14 +51,6 @@ Previously, customization was possible by editing `Bundle.properties` but this i For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt -### Direct Upload setting added -A Dataverse installation can be now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`. - -By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. 
With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). - -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. - - ### Improvements in the dataset versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions - a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output From ed5b0dbde90fd4b8592aa2bdce7ae205482063c8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Dec 2023 15:44:18 -0500 Subject: [PATCH 0437/1112] Apply suggestions from code review Co-authored-by: Philip Durbin --- doc/release-notes/10162-globus-support.md | 2 +- doc/sphinx-guides/source/developers/big-data-support.rst | 4 ++-- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/10162-globus-support.md b/doc/release-notes/10162-globus-support.md index 7bc3990f840..60670b5b101 100644 --- a/doc/release-notes/10162-globus-support.md +++ b/doc/release-notes/10162-globus-support.md @@ -1,7 +1,7 @@ Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, and for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/latest/developers/big-data-support.html#big-data-support) -- Globus functionality remains 'experimental'/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incomatibilities. +- Globus functionality remains 'experimental'/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incompatibilities. 
- The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model - Adding files to a dataset, and accessing files is supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus) - The functionality is also accessible via APIs (combining calls to the Dataverse and Globus APIs) diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index fe49f9f6150..8d891e63317 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -149,7 +149,7 @@ Globus File Transfer Note: Globus file transfer is still experimental but feedback is welcome! See :ref:`support`. -Users can transfer files via `Globus `_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) +Users can transfer files via `Globus `_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) and a community-developed `dataverse-globus `_ app has been properly installed and configured. Globus endpoints can be in a variety of places, from data centers to personal computers. @@ -168,7 +168,7 @@ Dataverse-managed endpoints must be Globus 'guest collections' hosted on either S3 connector which requires a paid Globus subscription at the host institution). In either case, Dataverse is configured with the Globus credentials of a user account that can manage the endpoint. Users will need a Globus account, which can be obtained via their institution or directly from Globus (at no cost). -With the file-system endpoint, Dataverse does not currently have access to the file contents. Thus, functionlity related to ingest, previews, fixity hash validation, etc. are not available. (Using the S3-based endpoint, Dataverse has access via S3 and all functionlity normally associated with direct uploads to S3 is available.) +With the file-system endpoint, Dataverse does not currently have access to the file contents. Thus, functionality related to ingest, previews, fixity hash validation, etc. are not available. (Using the S3-based endpoint, Dataverse has access via S3 and all functionality normally associated with direct uploads to S3 is available.) For the reference use case, Dataverse must be configured with a list of allowed endpoint/base paths from which files may be referenced. In this case, since Dataverse is not accessing the remote endpoint itself, it does not need Globus credentials. Users will need a Globus account in this case, and the remote endpoint must be configured to allow them access (i.e. be publicly readable, or potentially involving some out-of-band mechanism to request access (that could be described in the dataset's Terms of Use and Access). 
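A rough sketch, not a complete configuration, of how a *remote* (reference-only) Globus store using the options discussed in these guide changes might be registered; the store id `globusr`, the label, the endpoint UUID and base path are placeholders, and the `base-store` option name is assumed from the trusted remote store configuration:

```
export PAYARA=/usr/local/payara6
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.globusr.type=globus"
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.globusr.label=GlobusRemote"
# Ancillary dataset files (exports, thumbnails, etc.) still need a base store
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.globusr.base-store=file"
# Comma-separated list of trusted remote endpoint id/base-path pairs that may be referenced
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.globusr.reference-endpoints-with-basepaths=<endpoint-uuid>/<base-path>"
# Dataverse cannot read the bytes of referenced files
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.globusr.files-not-accessible-by-dataverse=true"
```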
diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 5b2b6982866..37d80d0a6cd 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -71,7 +71,7 @@ The getDatasetMetadata and getFileListing URLs are just signed versions of the s If called for a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a the "managed" parameter will be false, the "endpoint" parameter is replaced with a JSON array of "referenceEndpointsWithPaths" and the requestGlobusTransferPaths and addGlobusFiles URLs are replaced with ones for requestGlobusReferencePaths and addFiles. All of these calls are -describe further below. +described further below. The call to set up for a transfer out (download) is similar: From 1d668970df1562c3cbc85d60be2abc55d8a96572 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 8 Dec 2023 15:56:27 -0500 Subject: [PATCH 0438/1112] #10151 standard guide links --- doc/release-notes/6.1-release-notes.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index b6bb7d8b806..24194a02026 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -49,14 +49,14 @@ Organization-Email: support@dataverse.harvard.edu Previously, customization was possible by editing `Bundle.properties` but this is no longer supported. -For details, see https://dataverse-guide--10122.org.readthedocs.build/en/10122/installation/config.html#bag-info-txt +For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt ### Improvements in the dataset versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions - a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output - when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. -This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. +This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/6.1/api/native-api.html#dataset-versions-api) section of the Guide. ### The following API endpoints have been added: - deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version). @@ -128,13 +128,13 @@ Dataverse installations that have been using archival Bags may wish to update an existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse [archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) to generate updated versions. 
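For the dataset versions API changes summarized above, a quick sketch of how the new pagination and `includeFiles` flag might be exercised; the `limit`/`offset` names follow the usual Dataverse pagination parameters and are an assumption here, and the id is a placeholder:

```
export SERVER_URL=https://demo.dataverse.org
export ID=24
# List the first 10 versions, dropping the per-file details from the output
curl "$SERVER_URL/api/datasets/$ID/versions?includeFiles=false&limit=10&offset=0"
```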
-- There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews +- There is now a Markdown (.md) previewer: https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews - This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. - To fix #9952, we surround the license info with `<` and `>`. - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. -- We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html +- We have started maintaining an API changelog: https://guides.dataverse.org/en/6.1/api/changelog.html See also #10060. - Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) From 85206de08acb6a8373199fb0d4eec2768cb6763d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 8 Dec 2023 15:59:21 -0500 Subject: [PATCH 0439/1112] simply API changelog to be about breaking changes only #10151 --- doc/release-notes/6.1-release-notes.md | 2 +- doc/sphinx-guides/source/api/changelog.rst | 19 +++++-------------- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 24194a02026..a3b04749d68 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -134,7 +134,7 @@ to generate updated versions. - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. -- We have started maintaining an API changelog: https://guides.dataverse.org/en/6.1/api/changelog.html +- We have started maintaining an API changelog of breaking changes: https://guides.dataverse.org/en/6.1/api/changelog.html See also #10060. - Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. 
The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 910134e14f3..20225b99b5c 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -1,5 +1,7 @@ -API Changelog -============= +API Changelog (Breaking Changes) +================================ + +This API changelog is experimental and we would love feedback on its usefulness. Its primary purpose is to inform API developers of any breaking changes. (We try not ship any backward incompatible changes, but it happens.) To see a list of new APIs and backward-compatible changes to existing API, please see each version's release notes at https://github.com/IQSS/dataverse/releases .. contents:: |toctitle| :local: @@ -8,20 +10,9 @@ API Changelog v6.1 ---- -New -~~~ -- **/api/dataverses/{id}/datasetSchema**: See :ref:`get-dataset-json-schema`. -- **/api/dataverses/{id}/validateDatasetJson**: See :ref:`validate-dataset-json`. -- **/api/admin/clearThumbnailFailureFlag**: See :ref:`thumbnail_reset`. -- **/api/admin/downloadTmpFile**: See :ref:`download-file-from-tmp`. - -Changes -~~~~~~~ -- **/api/datasets/{id}/versions/{versionId}/citation**: This endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. See :ref:`get-citation`. +- The metadata field "Alternative Title" now supports multiple values so you must pass an array rather than a string when populating that field via API. See https://github.com/IQSS/dataverse/pull/9440 v6.0 ---- -Changes -~~~~~~~ - **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. See :doc:`dataaccess`. From 0cd87d167211ee6bc047de3cba3e79acfb520e28 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Dec 2023 16:37:03 -0500 Subject: [PATCH 0440/1112] address Review comments --- .../source/admin/integrations.rst | 12 ++++++++ doc/sphinx-guides/source/api/intro.rst | 4 +++ .../source/developers/globus-api.rst | 6 ++-- .../source/installation/config.rst | 9 +++--- .../edu/harvard/iq/dataverse/DatasetPage.java | 29 ------------------- .../AbstractRemoteOverlayAccessIO.java | 2 +- 6 files changed, 25 insertions(+), 37 deletions(-) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index 9a24cf0715c..db566106b49 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -121,6 +121,18 @@ Its goal is to make the dashboard adjustable for a Dataverse installation's need The integrations dashboard is currently in development. 
A preview and more information can be found at: `rdm-integration GitHub repository `_ +Globus +++++++ + +Globus transfer uses an efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files: + +* robust file transfer capable of restarting after network or endpoint failures +* third-party transfer, which enables a user accessing a Dataverse installation in their desktop browser to initiate transfer of their files from a remote endpoint (i.e. on a local high-performance computing cluster), directly to an S3 store managed by the Dataverse installation + +Users can transfer files via `Globus `_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) +and a community-developed `dataverse-globus `_ app has been properly installed and configured. + + Embedding Data on Websites -------------------------- diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index 6c61bb8c20d..8eb11798dd7 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -187,6 +187,10 @@ Lists of Dataverse APIs - Files - etc. +- :doc:`/developers/dataset-semantic-metadata-api`: For creating, reading, editing, and deleting dataset metadata using JSON-LD. +- :doc:`/developers/dataset-migration-api`: For migrating datasets from other repositories while retaining the original persistent identifiers and publication date. +- :doc:`/developers/s3-direct-upload-api`: For the transfer of larger files/larger numbers of files directly to an S3 bucket managed by Dataverse. +- :doc:`/developers/globus-api`: For the Globus transfer of larger files/larger numbers of files directly via Globus endpoints managed by Dataverse or referencing files in remote endpoints. - :doc:`metrics`: For query statistics about usage of a Dataverse installation. - :doc:`sword`: For depositing data using a standards-based approach rather than the :doc:`native-api`. 
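Ahead of the globus-api.rst changes that follow, a hedged sketch of the setup call whose response (getDatasetMetadata, getFileListing, requestGlobusTransferPaths, etc.) is discussed there; the `globusUploadParameters` endpoint name is recalled from the Globus API guide and should be treated as an assumption, and the token and id values are placeholders:

```
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export ID=24
# Ask Dataverse for the Globus transfer/reference parameters and signed URLs for this dataset
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/globusUploadParameters?locale=en"
```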
diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 37d80d0a6cd..de9df06a798 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -160,11 +160,11 @@ In the managed case, once a Globus transfer has been initiated a final API call export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV - export JSON_DATA="{"taskIdentifier":"3f530302-6c48-11ee-8428-378be0d9c521", \ + export JSON_DATA='{"taskIdentifier":"3f530302-6c48-11ee-8428-378be0d9c521", \ "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \ - {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}" + {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}' - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA"" + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA" Note that the mimetype is multipart/form-data, matching the /addFiles API call. ALso note that the API_TOKEN is not needed when using a signed URL. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4540219fc7c..f6c05a3bde8 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -499,8 +499,8 @@ Logging & Slow Performance .. _file-storage: -File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores and/or Globus Stores --------------------------------------------------------------------------------------------------------------------------- +File Storage +------------ By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara6/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\.directory`` JVM option described below. @@ -999,7 +999,8 @@ See :doc:`/developers/big-data-support` for additional information on how to use In addition to having the type "globus" and requiring a label, Globus Stores share many options with Trusted Remote Stores and options to specify and access a Globus endpoint(s). As with Remote Stores, Globus Stores also use a baseStore - a file, s3, or swift store that can be used to store additional ancillary dataset files (e.g. metadata exports, thumbnails, auxiliary files, etc.). These and other available options are described in the table below. 
-There are two types of Globus stores +There are two types of Globus stores: + - managed - where Dataverse manages the Globus endpoint, deciding where transferred files are stored and managing access control for users transferring files to/from Dataverse - remote - where Dataverse references files that remain on trusted remote Globus endpoints @@ -1024,7 +1025,7 @@ Once you have configured a globus store, it is recommended that you install the dataverse.files..globus-token A Globus token (base64 endcoded : for a managed store) - using a microprofile alias is recommended (none) dataverse.files..reference-endpoints-with-basepaths A comma separated list of *remote* trusted Globus endpoint id/s (none) - dataverse.files..files-not-accessible-by-dataverse ``true``/``false`` Should be true for S3 Connector-based *managed* stores ``false`` + dataverse.files..files-not-accessible-by-dataverse ``true``/``false`` Should be false for S3 Connector-based *managed* stores, true for others ``false`` ======================================================= ================== ========================================================================== =================== diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 9c7d599ba33..b79f387f20b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5312,36 +5312,7 @@ public boolean isFileAccessRequestMultiButtonEnabled(){ } return false; } -/* These appear to be unused - toDo - delete - private Boolean downloadButtonAllEnabled = null; - public boolean isDownloadAllButtonEnabled() { - - if (downloadButtonAllEnabled == null) { - for (FileMetadata fmd : workingVersion.getFileMetadatas()) { - if (!this.fileDownloadHelper.canDownloadFile(fmd)) { - downloadButtonAllEnabled = false; - break; - } - } - downloadButtonAllEnabled = true; - } - return downloadButtonAllEnabled; - } - - public boolean isDownloadSelectedButtonEnabled(){ - - if( this.selectedFiles == null || this.selectedFiles.isEmpty() ){ - return false; - } - for (FileMetadata fmd : this.selectedFiles){ - if (this.fileDownloadHelper.canDownloadFile(fmd)){ - return true; - } - } - return false; - } -*/ public boolean isFileAccessRequestMultiSignUpButtonRequired(){ if (isSessionUserAuthenticated()){ return false; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java index 6c26502acfa..10ff68a56f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java @@ -50,7 +50,7 @@ public abstract class AbstractRemoteOverlayAccessIO extends protected static final String REMOTE_STORE_URL = "remote-store-url"; // Whether Dataverse can access the file bytes - //Currently True for the Globus store when using the S3Connector, and Remote Stores like simple web servers where the URLs resolve to the actual file bits + // Currently False only for the Globus store when using the S3Connector, and Remote Stores like simple web servers where the URLs resolve to the actual file bits static final String FILES_NOT_ACCESSIBLE_BY_DATAVERSE = "files-not-accessible-by-dataverse"; protected StorageIO baseStore = null; From 9dd3f9785c6a5c8939bd9f023400f5f10c3ef58d Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 11 Dec 2023 
09:28:16 +0000 Subject: [PATCH 0441/1112] Added: release notes for #10155 --- .../10155-datasets-can-download-at-least-one-file.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10155-datasets-can-download-at-least-one-file.md diff --git a/doc/release-notes/10155-datasets-can-download-at-least-one-file.md b/doc/release-notes/10155-datasets-can-download-at-least-one-file.md new file mode 100644 index 00000000000..566d505f7ca --- /dev/null +++ b/doc/release-notes/10155-datasets-can-download-at-least-one-file.md @@ -0,0 +1,3 @@ +The getCanDownloadAtLeastOneFile (/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile) endpoint has been created. + +This endpoint allows to know if the calling user can download at least one file of a particular dataset version. From 9fb44d3d45080a2e5c9de15ab0445cc052c956b3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 11 Dec 2023 09:33:56 +0000 Subject: [PATCH 0442/1112] Added: docs for #10155 --- doc/sphinx-guides/source/api/native-api.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 56190dd342c..99438520120 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2686,6 +2686,19 @@ In particular, the user permissions that this API call checks, returned as boole curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions" +Know if a User can download at least one File from a Dataset Version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call allows to know if the calling user can download at least one file of a dataset version. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + + curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/versions/$VERSION/canDownloadAtLeastOneFile" + Files ----- From ca706662cd9f19b36d31530cf2747d810923ca3e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 11 Dec 2023 11:06:36 -0500 Subject: [PATCH 0443/1112] bug fix - allowing S3 w/Globus config to work for download --- .../iq/dataverse/dataaccess/GlobusAccessibleStore.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java index e4d062f0619..8bed60d8302 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java @@ -65,7 +65,11 @@ public static String getGlobusToken(String storeId) { } public static boolean isGlobusAccessible(String storeId) { - if(StorageIO.getConfigParamForDriver(storeId, StorageIO.TYPE).equals(DataAccess.GLOBUS)) { + String type = StorageIO.getConfigParamForDriver(storeId, StorageIO.TYPE); + if (type.equals(DataAccess.GLOBUS)) { + return true; + } else if (type.equals(DataAccess.S3) + && StorageIO.getConfigParamForDriver(storeId, TRANSFER_ENDPOINT_WITH_BASEPATH) != null) { return true; } return false; From 09a227b30a2b5da05829297a9173952596e2df9c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 11 Dec 2023 11:12:04 -0500 Subject: [PATCH 0444/1112] Change docs tp make clear that an S3 store can be used --- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f6c05a3bde8..a7d7905ca4a 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1004,10 +1004,10 @@ There are two types of Globus stores: - managed - where Dataverse manages the Globus endpoint, deciding where transferred files are stored and managing access control for users transferring files to/from Dataverse - remote - where Dataverse references files that remain on trusted remote Globus endpoints -For managed stores, there are two variants, connecting to standard/file-based Globus endpoints and to endpoints using an underlying S3 store via the Globus S3 Connector. +A managed Globus store connects to standard/file-based Globus endpoint. It is also possible to configure an S3 store as a managed store, if the managed endpoint uses an underlying S3 store via the Globus S3 Connector. With the former, Dataverse has no direct access to the file contents and functionality related to ingest, fixity hash validation, etc. are not available. With the latter, Dataverse can access files internally via S3 and the functionality supported is similar to that when using S3 direct upload. -Once you have configured a globus store, it is recommended that you install the `dataverse-globus app `_ to allow transfers in/out of Dataverse to be initated via the Dataverse user interface. Alternately, you can point your users to the :doc:`/developers/globus-api` for information about API support. +Once you have configured a globus store, or configured an S3 store for Globus access, it is recommended that you install the `dataverse-globus app `_ to allow transfers in/out of Dataverse to be initated via the Dataverse user interface. Alternately, you can point your users to the :doc:`/developers/globus-api` for information about API support. .. table:: :align: left From 44bd5b7fb6d697d356d857a73847e1637aaa5763 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 11 Dec 2023 11:19:46 -0500 Subject: [PATCH 0445/1112] add perf test results --- doc/release-notes/6.1-release-notes.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index a3b04749d68..b03a7a62baa 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -258,7 +258,7 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa - `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` -7\. Upate Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). +7\. Update Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). 7a\. For installations without custom or experimental metadata blocks: @@ -298,6 +298,10 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub. 
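Related to the "Update Solr schema.xml" upgrade step touched in the change above, a small sketch of reloading the core in place after a hand edit; the core name `collection1` is the stock install default and is an assumption here:

```
# Reload the schema without a full Solr restart
curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"
```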
+## Performance Testing Results +The results of performance testing can be found here: +https://docs.google.com/spreadsheets/d/1lwPlifvgu3-X_6xLwq6Zr6sCOervr1mV_InHIWjh5KA/edit#gid=0 + ## Getting Help For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. From 173b8a7a067b392de8e1c900c3e1d9eb806c71d6 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 11 Dec 2023 11:25:44 -0500 Subject: [PATCH 0446/1112] fix backward comp Alternative Title --- doc/release-notes/6.1-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index b03a7a62baa..5bc0df4640c 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -291,7 +291,7 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as ## Backward Incompatibilities -- Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version +- Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version. Alternative Title must now be passed as an array of strings rather than a single string ([alt title]) - Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility, ## Complete List of Changes From 1959f2ff22d9bbc4290a586fc49f1f49eccdbd04 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 11 Dec 2023 11:29:24 -0500 Subject: [PATCH 0447/1112] removed unneeded header --- doc/release-notes/6.1-release-notes.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 5bc0df4640c..6d3d1912f81 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -6,11 +6,8 @@ This release brings new features, enhancements, and bug fixes to the Dataverse s Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. ## Release Highlights (Major Upgrades, Breaking Changes) - This release contains major upgrades to core components. Detailed upgrade instructions can be found below. -## Detailed Release Highlights, New Features and Use Case Scenarios - ### Optional support for guestbooks to appear when files access is requested rather than after access has been granted and a download is started Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. 
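A minimal sketch of flipping the global guestbook default described above, assuming a standard Payara layout (the path is a placeholder); collection- and dataset-level overrides are handled separately, as noted:

```
export PAYARA=/usr/local/payara6
# Show guestbooks when access to restricted files is requested, rather than at download time
$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.files.guestbook-at-request=true"
```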
From a4e25e17155896ae5c335ea8169229f248eaf22b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 12:15:56 -0500 Subject: [PATCH 0448/1112] reorg 6.1 release notes, add globus #10151 --- doc/release-notes/6.1-release-notes.md | 262 +++++++++++++------------ 1 file changed, 137 insertions(+), 125 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 6d3d1912f81..475d4fc0887 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -5,57 +5,96 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. -## Release Highlights (Major Upgrades, Breaking Changes) -This release contains major upgrades to core components. Detailed upgrade instructions can be found below. +## Release highlights -### Optional support for guestbooks to appear when files access is requested rather than after access has been granted and a download is started -Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). - The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. +### Guestbook at request + +Dataverse can now be configured (via the `dataverse.files.guestbook-at-request` option) to display any configured guestbook to users when they request restricted files (new functionality) or when they download files (previous behavior). + +The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default, showing guestbooks when files are downloaded, remains as it was in prior Dataverse versions. + +### Collection-level storage quotas + +This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. +Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the first, initial deployment of Dataverse 6.1. + +### Globus support + +Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible and for the case of referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. 
Please note: + +- Globus functionality remains experimental/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incompatibilities. +- The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model. +- Adding files to a dataset, and accessing files is supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus). +- The functionality is also accessible via APIs (combining calls to the Dataverse and Globus APIs) + +Backward incompatibilities: +- The configuration for use of a Globus S3 Connector has changed and is aligned with the standard store configuration mechanism +- The new functionality is incompatible with older versions of the globus-dataverse app and the Globus-related functionality in the UI will only function correctly if a Dataverse 6.1 compatible version of the dataverse-globus app is configured. + +New JVM options: +- A new "globus" store type and associated store-related options have been added. These are described in the [File Storage Options section of the Dataverse Guides](https://guides.dataverse.org/en/latest/installation/config.html#file-storage-using-a-local-filesystem-and-or-swift-and-or-object-stores-and-or-trusted-remote-stores). +- dataverse.files.globus-cache-maxage - specifies the number of minutes Dataverse will wait between an initial request for a file transfer occurs and when that transfer must begin. + +Obsolete Settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used + +Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/6.1/developers/big-data-support.html#big-data-support) + +### Alternative Title now allows multiple values + +Alternative Title now allows multiples. Note that JSON used to create a dataset with an Alternate Title must be changed. See "Backward incompatibilities" below for details. + +### External tools: configure tools now available at the dataset level + +Read/write "configure" tools (a type of external tool) are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. + +### S3 out-of-band upload -### Dataverse installation can be now be configured to allow out-of-band upload In some situations, direct upload might not work from the UI, e.g., when s3 storage is not accessible from the internet. This pull request adds an option to [allow direct uploads via API only](https://github.com/IQSS/dataverse/pull/9003). This way, a third party application can use direct upload from within the internal network, while there is no direct download available to the users via UI. By default, Dataverse supports uploading files via the [add a file to a dataset](https://guides.dataverse.org/en/6.1/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). 
With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. -### Alternative Title is made repeatable. -- One will need to update database with updated citation block. - `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` -- One will also need to update Solr schema: - Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml` - Reload Solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` +### JSON Schema for datasets -Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version since value of alternative title has changed from simple string to an array. -For example, instead "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] +Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) -### Collection Storage Size Quota Support --This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. -Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the first, initial deployment of 6.1 +### OpenID Connect authentication provider improvements -### BagIT Export Configurations Updated -For BagIT export, it is now possible to configure the following information in bag-info.txt... +#### Using MicroProfile Config for provisioning -Source-Organization: Harvard Dataverse -Organization-Address: 1737 Cambridge Street, Cambridge, MA, USA -Organization-Email: support@dataverse.harvard.edu +With this release it is possible to provision a single OIDC-based authentication provider +by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. -... 
using new JVM/MPCONFIG options: +If you are using an external OIDC provider component as an identity management system and/or broker +to other authentication providers such as Google, eduGain SAML and so on, this might make your +life easier during instance setups and reconfiguration. You no longer need to generate the +necessary JSON file. -- dataverse.bagit.sourceorg.name -- dataverse.bagit.sourceorg.address -- dataverse.bagit.sourceorg.email +#### Adding PKCE Support -Previously, customization was possible by editing `Bundle.properties` but this is no longer supported. +[This PR adds PKCE support for OIDC providers](https://github.com/IQSS/dataverse/pull/9273) +Some OIDC providers require using PKCE as an additional security layer. As of this version, you can enable +support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) -For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt +### Solr improvements -### Improvements in the dataset versions API -- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions -- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output -- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. +As of this release, application-side support has been added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. -This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/6.1/api/native-api.html#dataset-versions-api) section of the Guide. +Please see the "Installing Solr" section of the Installation Prerequisites guide. + +### New release of Dataverse Previewers (including a Markdown previewer) + +Version 1.4 of the standard Dataverse Previewers from https://github.com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. Please note: + +- SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. +- Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. +- There is now a Markdown (.md) previewer: https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews + +### New or improved APIs + +The development of a [new UI for Dataverse](https://github.com/IQSS/dataverse-frontend) is driving the addition or improvement of many APIs. + +#### New API endpoints -### The following API endpoints have been added: - deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version).
- /api/files/{id}/downloadCount - /api/files/{id}/dataTables @@ -71,7 +110,33 @@ This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/ - validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset JSON file is in proper format and contains the required elements and fields for a given dataverse collection. - downloadTmpFile (/api/admin/downloadTmpFile): For testing purposes, allows files to be downloaded from /tmp. -### Extended the existing endpoints: +#### Pagination of files in dataset versions + +- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions +- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output +- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. + +This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/6.1/api/native-api.html#dataset-versions-api) section of the Guide. + + +#### DataFile API payload has been extended to include the following fields + +- tabularData: Boolean field to know if the DataFile is of tabular type +- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) +- friendlyType: String + +#### The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination, ordering, and optional filtering + +- Access status: through the `accessStatus` query parameter, which supports the following values: + - Public + - Restricted + - EmbargoedThenRestricted + - EmbargoedThenPublic +- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. +- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". + +#### Additional improvements to existing API endpoints + - getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. - getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain file counts. Added support for filtering by optional criteria query parameter: - contentType @@ -93,25 +158,21 @@ This parameter applies a filter criteria to the operation and supports the follo - Can delete the dataset draft - getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. 
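As a rough illustration of the getVersionFiles filters described above (a sketch only; `$SERVER_URL`, `$API_TOKEN`, the dataset id, and the version number are placeholders):

```bash
# Sketch only: list the files of version 1.0 of dataset 24, filtered by
# content type, access status, and search text; all query parameters are optional.
curl -H "X-Dataverse-key: $API_TOKEN" \
  "$SERVER_URL/api/datasets/24/versions/1.0/files?contentType=image/png&accessStatus=Public&searchText=survey&includeDeaccessioned=true"
```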
-### DataFile API payload has been extended to include the following fields: -- tabularData: Boolean field to know if the DataFile is of tabular type -- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) -- friendlyType: String +### Improvements for developers -### The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination, ordering, and optional filtering -- Access status: through the `accessStatus` query parameter, which supports the following values: - - Public - - Restricted - - EmbargoedThenRestricted - - EmbargoedThenPublic -- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. -- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". +- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (with the Payara Platform Tools plugin). For details, see https://guides.dataverse.org/en/6.1/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools and [the thread](https://groups.google.com/g/dataverse-community/c/zNBDzSMF2Q0/m/Z-xS6fA2BgAJ) on the mailing list. +- Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. API (end to end) tests are in S3AccessIT. +- In addition, a new integration test class (not an API test, the new Testcontainers-based test launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. +- With this release, we add a new type of testing to Dataverse: integration tests which are not end-to-end tests (like our API tests). Starting with OIDC authentication support, we test regularly on CI for working condition of both OIDC login options in UI and API. +- The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. +- The support for setting JVM options during testing has been improved for developers. You now may add the `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. +- As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. +## Major use cases and infrastructure enhancements -### Misc -- Configure tools are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. -- Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). -The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. +Changes and fixes in this release not already mentioned above include: + +- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. 
This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. (issue #9547). This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. - Dataverse's OAI_ORE Metadata Export format and archival BagIT exports (which include the OAI-ORE metadata export file) have been updated to include information about the dataset version state, e.g. RELEASED or DEACCESSIONED @@ -125,68 +186,18 @@ Dataverse installations that have been using archival Bags may wish to update an existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse [archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) to generate updated versions. -- There is now a Markdown (.md) previewer: https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews +- For BagIT export, it is now possible to configure the following information in bag-info.txt. (Previously, customization was possible by editing `Bundle.properties` but this is no longer supported.) For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt + - Source-Organization from `dataverse.bagit.sourceorg.name`. + - Organization-Address from `dataverse.bagit.sourceorg.address`. + - Organization-Email from `dataverse.bagit.sourceorg.email`. - This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. - To fix #9952, we surround the license info with `<` and `>`. - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. - - We have started maintaining an API changelog of breaking changes: https://guides.dataverse.org/en/6.1/api/changelog.html See also #10060. -- Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) -- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. (issue 9547). This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed.
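For the `dataverse.bagit.sourceorg.*` options listed above, a configuration sketch, assuming they are supplied as MicroProfile Config environment variables (the values are examples only):

```bash
# Sketch only: example bag-info.txt values; adjust to your own organization.
export DATAVERSE_BAGIT_SOURCEORG_NAME="Example University Dataverse"
export DATAVERSE_BAGIT_SOURCEORG_ADDRESS="123 Example Street, Example City"
export DATAVERSE_BAGIT_SOURCEORG_EMAIL="support@example.edu"
```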
- -### Solr Improvements -- As of this release application-side support is added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. - -Please see the "Installing Solr" section of the Installation Prerequisites guide. - - -### Development -- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. -For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools and [the thread](https://groups.google.com/g/dataverse-community/c/zNBDzSMF2Q0/m/Z-xS6fA2BgAJ) on the mailing list. -- A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. - - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. - - Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. -- `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is -also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. -- As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. -- Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. API (end to end) tests are in S3AccessIT. -- In addition, a new integration test class (not an API test, the new Testcontainers-based test launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running. - -## OpenID Connect Authentication Provider Improvements - -### Using MicroProfile Config For Provisioning - -With this release it is possible to provision a single OIDC-based authentication provider -by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. - -If you are using an external OIDC provider component as an identity management system and/or broker -to other authentication providers such as Google, eduGain SAML and so on, this might make your -life easier during instance setups and reconfiguration. You no longer need to generate the -necessary JSON file. - -### Adding PKCE Support -[This PR adds PKCE support for OIDC providers](https://github.com/IQSS/dataverse/pull/9273) -Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable -support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) - -## Improved Testing - -With this release, we add a new type of testing to Dataverse: integration tests which are no end-to-end tests -like our API tests. Starting with OIDC authentication support, we test regularly on CI for working condition -of both OIDC login options in UI and API. 
- -The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. - -The support for setting JVM options during testing has been improved for developers. You now may add the -`@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is -also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. - -As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`. - -## New Configuration Options +## New configuration options - dataverse.auth.oidc.enabled - dataverse.auth.oidc.client-id @@ -199,8 +210,24 @@ As part of these testing improvements, the code coverage report file for unit te - dataverse.auth.oidc.pkce.max-cache-size - dataverse.auth.oidc.pkce.max-cache-age - dataverse.files.{driverId}.upload-out-of-band +- dataverse.files.globus-cache-maxage - dataverse.files.guestbook-at-request +## Backward incompatibilities + +- Since Alternative Title is now repeatable, the JSON you send to create or edit a dataset must be an array rather than a simple string. For example, instead of "value": "Alternative Title", you must send "value": ["Alternative Title1", "Alternative Title2"] +- Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility. See above for details. +- For BagIT export, if you were configuring values in bag-info.txt using `Bundle.properties`, you must switch to the new JVM options `dataverse.bagit.sourceorg.name`, `dataverse.bagit.sourceorg.address`, and `dataverse.bagit.sourceorg.email`. For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt +- See "Globus support" above for backward incompatibilies specific to Globus. + +## Complete list of changes + +For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub. + +## Getting help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. + ## Installation If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! @@ -209,7 +236,7 @@ Once you are in production, we would be delighted to update our [map of Datavers You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC). -## Upgrade Instructions +## Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. @@ -241,6 +268,8 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa - `$PAYARA/bin/asadmin deploy dataverse-6.1.war` +As noted above, deployment of the war file might take several minutes due a database migration script required for the new storage quotas feature. + 5\. 
Restart Payara - `service payara stop` @@ -255,7 +284,7 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa - `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` -7\. Update Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). +7\. Update Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). 7a\. For installations without custom or experimental metadata blocks: @@ -285,20 +314,3 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as - Restart Solr instance (usually `service solr restart` depending on solr/OS) 8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). - - -## Backward Incompatibilities -- Since Alternative Title is repeatable now, old JSON APIs would not be compatible with a new version. Alternative Title must now be passed as an array of strings rather than a single string ([alt title]) -- Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility, - -## Complete List of Changes - -For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub. - -## Performance Testing Results -The results of performance testing can be found here: -https://docs.google.com/spreadsheets/d/1lwPlifvgu3-X_6xLwq6Zr6sCOervr1mV_InHIWjh5KA/edit#gid=0 - -## Getting Help - -For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. From 011b9291e6f694631d237bd047c3a170e6e93a2e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 13:58:08 -0500 Subject: [PATCH 0449/1112] remove globus snippet (already added) #10151 --- doc/release-notes/10162-globus-support.md | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 doc/release-notes/10162-globus-support.md diff --git a/doc/release-notes/10162-globus-support.md b/doc/release-notes/10162-globus-support.md deleted file mode 100644 index 60670b5b101..00000000000 --- a/doc/release-notes/10162-globus-support.md +++ /dev/null @@ -1,19 +0,0 @@ -Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, -and for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration.
-Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/latest/developers/big-data-support.html#big-data-support) -- Globus functionality remains 'experimental'/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incompatibilities. -- The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model -- Adding files to a dataset, and accessing files is supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus) -- The functionality is also accessible via APIs (combining calls to the Dataverse and Globus APIs) - -Backward Incompatibilities: -- The configuration for use of a Globus S3 Connector has changed and is aligned with the standard store configuration mechanism -- The new functionality is incompatible with older versions of the globus-dataverse app and the Globus-related functionality in the UI will only function correctly if a Dataverse 6.1 compatible version of the dataverse-globus app is configured. - -New JVM Options: -- A new 'globus' store type and associated store-related options have been added. These are described in the [File Storage Options section of the Dataverse Guides](https://guides.dataverse.org/en/latest/installation/config.html#file-storage-using-a-local-filesystem-and-or-swift-and-or-object-stores-and-or-trusted-remote-stores). -- dataverse.files.globus-cache-maxage - specifies the number of minutes Dataverse will wait between an initial request for a file transfer occurs and when that transfer must begin. - - - -Obsolete Settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used From 3e32f42959dce41e9c21c9e2285fdf719b048dc0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 14:57:52 -0500 Subject: [PATCH 0450/1112] link to guides in more places, other tweaks #10151 --- doc/release-notes/6.1-release-notes.md | 43 +++++++++++++------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 475d4fc0887..fab11ce4959 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -13,12 +13,17 @@ Dataverse can now be configured (via the `dataverse.files.guestbook-at-request` The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default, showing guestbooks when files are downloaded, remains as it was in prior Dataverse versions. +For details, see [dataverse.files.guestbook-at-request](https://guides.dataverse.org/en/6.1/installation/config.html#dataverse-files-guestbook-at-request) and PR #9599. + ### Collection-level storage quotas This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS. + Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. 
On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the first, initial deployment of Dataverse 6.1. -### Globus support +For details, see [Storage Quotas for Collections](https://guides.dataverse.org/en/6.1/admin/collectionquotas.html) in the Admin Guide. + +### Globus support (experimental), continued Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible and for the case of referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. Please note: @@ -32,54 +37,50 @@ Backward incompatibilities: - The new functionality is incompatible with older versions of the globus-dataverse app and the Globus-related functionality in the UI will only function correctly if a Dataverse 6.1 compatible version of the dataverse-globus app is configured. New JVM options: -- A new "globus" store type and associated store-related options have been added. These are described in the [File Storage Options section of the Dataverse Guides](https://guides.dataverse.org/en/latest/installation/config.html#file-storage-using-a-local-filesystem-and-or-swift-and-or-object-stores-and-or-trusted-remote-stores). +- A new "globus" store type and associated store-related options have been added. These are described in the [File Storage](https://guides.dataverse.org/en/6.1/installation/config.html#file-storage) section of the Installation Guide. - dataverse.files.globus-cache-maxage - specifies the number of minutes Dataverse will wait between an initial request for a file transfer occurs and when that transfer must begin. Obsolete Settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used -Further details can be found in the [Big Data Support section of the Dataverse Guides](https://guides.dataverse.org/en/6.1/developers/big-data-support.html#big-data-support) +Further details can be found in the [Big Data Support](https://guides.dataverse.org/en/6.1/developers/big-data-support.html#big-data-support) section of the Developer Guide. ### Alternative Title now allows multiple values -Alternative Title now allows multiples. Note that JSON used to create a dataset with an Alternate Title must be changed. See "Backward incompatibilities" below for details. +Alternative Title now allows multiples. Note that JSON used to create a dataset with an Alternate Title must be changed. See "Backward incompatibilities" below and PR #9440 for details. ### External tools: configure tools now available at the dataset level -Read/write "configure" tools (a type of external tool) are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. +Read/write "configure" tools (a type of external tool) are now available at the dataset level. They appear under the "Edit Dataset" menu. See [External Tools](https://guides.dataverse.org/en/6.1/admin/external-tools.html#dataset-level-configure-tools) in the Admin Guide and PR #9925. ### S3 out-of-band upload In some situations, direct upload might not work from the UI, e.g., when s3 storage is not accessible from the internet. This pull request adds an option to [allow direct uploads via API only](https://github.com/IQSS/dataverse/pull/9003). 
This way, a third party application can use direct upload from within the internal network, while there is no direct download available to the users via UI. By default, Dataverse supports uploading files via the [add a file to a dataset](https://guides.dataverse.org/en/6.1/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://guides.dataverse.org/en/6.1/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://guides.dataverse.org/en/6.1/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. ### JSON Schema for datasets -Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. (Issue #9464 and #9465) +Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.1/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide and PR #10109. -### OpenID Connect authentication provider improvements +### OpenID Connect (OIDC) improvements #### Using MicroProfile Config for provisioning -With this release it is possible to provision a single OIDC-based authentication provider -by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. +With this release it is possible to provision a single OIDC-based authentication provider by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. 
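A minimal sketch of such provisioning, assuming MicroProfile Config environment variables are used (the client id, secret, and issuer URL are placeholders):

```bash
# Sketch only: provision a single OIDC provider without the Admin API.
export DATAVERSE_AUTH_OIDC_ENABLED=1
export DATAVERSE_AUTH_OIDC_CLIENT_ID=my-client
export DATAVERSE_AUTH_OIDC_CLIENT_SECRET=my-secret
export DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=https://keycloak.example.edu/realms/example
```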
-If you are using an external OIDC provider component as an identity management system and/or broker -to other authentication providers such as Google, eduGain SAML and so on, this might make your -life easier during instance setups and reconfiguration. You no longer need to generate the -necessary JSON file. +If you are using an external OIDC provider component as an identity management system and/or broker to other authentication providers such as Google, eduGain SAML and so on, this might make your life easier during instance setups and reconfiguration. You no longer need to generate the necessary JSON file. #### Adding PKCE Support -[This PR adds PKCE support for OIDC providers](https://github.com/IQSS/dataverse/pull/9273) -Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable -support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) +Some OIDC providers require using PKCE as additional security layer. As of this version, you can enable support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) + +For both features, see the [OIDC](https://guides.dataverse.org/en/6.0/installation/oidc.html) section of the Installation Guide and PR #9273. ### Solr improvements As of this release, application-side support has been added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues. -Please see the "Installing Solr" section of the Installation Prerequisites guide. +Please see the [Installing Solr](https://guides.dataverse.org/en/6.1/installation/prerequisites.html#installing-solr) section of the Installation Guide. ### New release of Dataverse Previewers (including a Markdown previewer) @@ -87,7 +88,7 @@ Version 1.4 of the standard Dataverse Previewers from https://github/com/gdcc/da - SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. - Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. -- There is now a Markdown (.md) previewer: https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews +- There is now a [Markdown (.md)](https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews) previewer. ### New or improved APIs @@ -172,7 +173,7 @@ This parameter applies a filter criteria to the operation and supports the follo Changes and fixes in this release not already mentioned above include: -- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. (issue #9547). This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. +- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. See PR #10142. 
- Dataverse's OAI_ORE Metadata Export format and archival BagIT exports (which include the OAI-ORE metadata export file) have been updated to include information about the dataset version state, e.g. RELEASED or DEACCESSIONED @@ -184,7 +185,7 @@ recreate datasets from archival Bags will start indicating which version(s) of t OAI_ORE format they can read. Dataverse installations that have been using archival Bags may wish to update any existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse -[archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) +[archival Bag export API](https://guides.dataverse.org/en/6.1/installation/config.html#bagit-export-api-calls) to generate updated versions. - For BagIT export, it is now possible to configure the following information in bag-info.txt. (Previously, customization was possible by editing `Bundle.properties` but this is no longer supported.) For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt - Source-Organization from `dataverse.bagit.sourceorg.name`. From 92a298da25c03822c848e5a43253f039193665f9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 15:42:55 -0500 Subject: [PATCH 0451/1112] add missing new config options and sort #10151 --- doc/release-notes/6.1-release-notes.md | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index fab11ce4959..1e09a207104 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -200,25 +200,28 @@ See also #10060. ## New configuration options -- dataverse.auth.oidc.enabled +- dataverse.auth.oidc.auth-server-url - dataverse.auth.oidc.client-id - dataverse.auth.oidc.client-secret -- dataverse.auth.oidc.auth-server-url +- dataverse.auth.oidc.enabled - dataverse.auth.oidc.pkce.enabled +- dataverse.auth.oidc.pkce.max-cache-age +- dataverse.auth.oidc.pkce.max-cache-size - dataverse.auth.oidc.pkce.method -- dataverse.auth.oidc.title - dataverse.auth.oidc.subtitle -- dataverse.auth.oidc.pkce.max-cache-size -- dataverse.auth.oidc.pkce.max-cache-age -- dataverse.files.{driverId}.upload-out-of-band +- dataverse.auth.oidc.title +- dataverse.bagit.sourceorg.address +- dataverse.bagit.sourceorg.address +- dataverse.bagit.sourceorg.name - dataverse.files.globus-cache-maxage - dataverse.files.guestbook-at-request +- dataverse.files.{driverId}.upload-out-of-band ## Backward incompatibilities - Since Alternative Title is now repeatable, the JSON you send to create or edit a dataset must be an array rather than a simple string. For example, instead of "value": "Alternative Title", you must send "value": ["Alternative Title1", "Alternative Title2"] - Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility. See above for details. -- For BagIT export, if you were configuring values in bag-info.txt using `Bundle.properties`, you must switch to the new JVM options `dataverse.bagit.sourceorg.name`, `dataverse.bagit.sourceorg.address`, and `dataverse.bagit.sourceorg.email`. For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt +- For BagIT export, if you were configuring values in bag-info.txt using `Bundle.properties`, you must switch to the new `dataverse.bagit` JVM options mentioned above. 
For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt - See "Globus support" above for backward incompatibilies specific to Globus. ## Complete list of changes From 80634c7a59d7bfce4ab0e871d80d34f446579123 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 15:54:30 -0500 Subject: [PATCH 0452/1112] address feedback from review #9919 --- doc/sphinx-guides/source/developers/performance.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/performance.rst b/doc/sphinx-guides/source/developers/performance.rst index aa50cd6e40c..46c152f322e 100644 --- a/doc/sphinx-guides/source/developers/performance.rst +++ b/doc/sphinx-guides/source/developers/performance.rst @@ -116,12 +116,12 @@ We'd like to rate limit commands (CreateDataset, etc.) so that we can keep them Solr ~~~~ -While in the past Solr performance hasn't been much of a concern, in recent years we've noticed performance problems when Harvard Dataverse is under load. We are investigating in `#9635 `_. +While in the past Solr performance hasn't been much of a concern, in recent years we've noticed performance problems when Harvard Dataverse is under load. Improvements were made in `PR #10050 `_, for example. Datasets with Large Numbers of Files or Versions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We'd like to scale Dataverse to better handle large number of files or versions (`#9763 `_). +We'd like to scale Dataverse to better handle large number of files or versions. Progress was made in `PR #9883 `_. Withstanding Bots ~~~~~~~~~~~~~~~~~ @@ -183,7 +183,7 @@ Most likely there is training available that is oriented toward performance. The Learn from the Community How They Monitor Performance ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Some members of the Dataverse community are likely users of newish tools like the ELK stack (Elasticsearch, Logstash, and Kibana), the TICK stack (Telegraph InfluxDB Chronograph and Kapacitor), GoAccess, Prometheus, Graphite, and more we haven't even heard of. In the :doc:`/admin/monitoring` section of the Admin Guide, we already encourage the community to share findings (, but we could dedicate time to this topic at our annual meeting or community calls. +Some members of the Dataverse community are likely users of newish tools like the ELK stack (Elasticsearch, Logstash, and Kibana), the TICK stack (Telegraph InfluxDB Chronograph and Kapacitor), GoAccess, Prometheus, Graphite, and more we haven't even heard of. In the :doc:`/admin/monitoring` section of the Admin Guide, we already encourage the community to share findings, but we could dedicate time to this topic at our annual meeting or community calls. Teach the Community to Do Performance Testing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 3d6343eca2846edca97e4d9699f3305fb7c19c62 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Dec 2023 16:09:46 -0500 Subject: [PATCH 0453/1112] mention configurable docroot #10151 --- doc/release-notes/6.1-release-notes.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 1e09a207104..1279d09a023 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -195,6 +195,7 @@ to generate updated versions. - To fix #9952, we surround the license info with `<` and `>`. - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. 
This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. +- It's now possible to configure the docroot, which holds collection logos and more. See [dataverse.files.docroot](https://guides.dataverse.org/en/6.1/installation/config.html#dataverse-files-docroot) in the Installation Guide and PR #9819. - We have started maintaining an API changelog of breaking changes: https://guides.dataverse.org/en/6.1/api/changelog.html See also #10060. @@ -213,6 +214,7 @@ See also #10060. - dataverse.bagit.sourceorg.address - dataverse.bagit.sourceorg.address - dataverse.bagit.sourceorg.name +- dataverse.files.docroot - dataverse.files.globus-cache-maxage - dataverse.files.guestbook-at-request - dataverse.files.{driverId}.upload-out-of-band From fa32ef5a413f6b0fbfab7d6e96e602a31bc18ac4 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Tue, 12 Dec 2023 11:36:52 +0000 Subject: [PATCH 0454/1112] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 99438520120..1e86f24356b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2686,7 +2686,7 @@ In particular, the user permissions that this API call checks, returned as boole curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions" -Know if a User can download at least one File from a Dataset Version +Know If a User Can Download at Least One File from a Dataset Version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This API call allows to know if the calling user can download at least one file of a dataset version. From 476977b48925ae6eae4dabf69b0de0d7d40d6841 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Tue, 12 Dec 2023 11:37:01 +0000 Subject: [PATCH 0455/1112] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1e86f24356b..9ceeb4410ef 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2689,7 +2689,7 @@ In particular, the user permissions that this API call checks, returned as boole Know If a User Can Download at Least One File from a Dataset Version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This API call allows to know if the calling user can download at least one file of a dataset version. +This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. .. 
code-block:: bash From 64861afbc11c4475ca3d85e729f4b73e962d5efa Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Tue, 12 Dec 2023 11:37:36 +0000 Subject: [PATCH 0456/1112] Update doc/release-notes/10155-datasets-can-download-at-least-one-file.md Co-authored-by: Philip Durbin --- .../10155-datasets-can-download-at-least-one-file.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10155-datasets-can-download-at-least-one-file.md b/doc/release-notes/10155-datasets-can-download-at-least-one-file.md index 566d505f7ca..a0b0d02310a 100644 --- a/doc/release-notes/10155-datasets-can-download-at-least-one-file.md +++ b/doc/release-notes/10155-datasets-can-download-at-least-one-file.md @@ -1,3 +1,3 @@ The getCanDownloadAtLeastOneFile (/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile) endpoint has been created. -This endpoint allows to know if the calling user can download at least one file of a particular dataset version. +This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. From 39e4bcee0f164854301b45f0ba6cbd4e11b4cf5c Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Dec 2023 13:42:46 +0000 Subject: [PATCH 0457/1112] Fixed: minio storage volume mapping --- docker-compose-dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 5265a6b7c2d..6f8decc0dfb 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -209,7 +209,7 @@ services: networks: - dataverse volumes: - - minio_storage:/data + - ./docker-dev-volumes/minio_storage:/data environment: MINIO_ROOT_USER: 4cc355_k3y MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k3y From 0c279adc3e93bd09bedc08a3f1bda48876fc1de3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Dec 2023 13:50:08 +0000 Subject: [PATCH 0458/1112] Removed: sleep calls from testGetCanDownloadAtLeastOneFile IT --- .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b2cf5c75467..f36b93b85ab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -4225,7 +4225,7 @@ public void testGetGlobusUploadParameters() { } @Test - public void testGetCanDownloadAtLeastOneFile() throws InterruptedException { + public void testGetCanDownloadAtLeastOneFile() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); @@ -4252,9 +4252,6 @@ public void testGetCanDownloadAtLeastOneFile() throws InterruptedException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Make sure the dataset is published - Thread.sleep(3000); - // Create a second user to call the getCanDownloadAtLeastOneFile method Response createSecondUserResponse = UtilIT.createRandomUser(); createSecondUserResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -4275,9 +4272,6 @@ public void testGetCanDownloadAtLeastOneFile() throws InterruptedException { publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, 
"major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Make sure the dataset is published - Thread.sleep(3000); - // Call with a valid dataset id when a file is restricted and the user does not have access canDownloadAtLeastOneFileResponse = UtilIT.getCanDownloadAtLeastOneFile(Integer.toString(datasetId), DS_VERSION_LATEST, secondUserApiToken); canDownloadAtLeastOneFileResponse.then().assertThat().statusCode(OK.getStatusCode()); From 960a20c79dc8a3292ff3d26973d8e35d8a4f481c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Dec 2023 14:06:21 -0500 Subject: [PATCH 0459/1112] #10168 fix error response status --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3bfc476423..05355cbbc68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4288,7 +4288,7 @@ public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext headers); if (dsv.getArchivalCopyLocation() == null) { - return error(Status.NO_CONTENT, "This dataset version has not been archived"); + return error(Status.NOT_FOUND, "This dataset version has not been archived"); } else { JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation()); return ok(status); From 40e5d39c73ec2097fb16d65e8fff33078168498b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Dec 2023 14:53:45 -0500 Subject: [PATCH 0460/1112] how to test Docker images made during a release --- .../source/developers/making-releases.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 23c4773a06e..432b4ca2672 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -67,6 +67,19 @@ Once important tests have passed (compile, unit tests, etc.), merge the pull req If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later). +(Optional) Test Docker Images +----------------------------- + +After the "master" branch has been updated and the GitHub Action to build and push Docker images has run (see `PR #9776 `_), go to https://hub.docker.com/u/gdcc and make sure the "alpha" tag for the following images has been updated: + +- https://hub.docker.com/r/gdcc/base +- https://hub.docker.com/r/gdcc/dataverse +- https://hub.docker.com/r/gdcc/configbaker + +To test these images against our API test suite, go to the "alpha" workflow at https://github.com/gdcc/api-test-runner/actions/workflows/alpha.yml and run it. + +If there are failures, additional dependencies or settings may have been added to the "develop" workflow. Copy them over and try again. 
+ Build the Guides for the Release -------------------------------- From a240bd0fa81cc4a9db0cc9c8ddb37ad733324fcd Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 12 Dec 2023 15:20:07 -0500 Subject: [PATCH 0461/1112] bump htmlunit to 3.9.0 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 34b0ad2e835..d690e509f46 100644 --- a/pom.xml +++ b/pom.xml @@ -650,7 +650,7 @@ org.htmlunit htmlunit - 3.2.0 + 3.9.0 test From b1f15bb95ff58dd62c7aaa1a2ababa1f44b83881 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 12 Dec 2023 15:30:54 -0500 Subject: [PATCH 0462/1112] bump DuraCloud to 8.0.0 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 34b0ad2e835..be4fa605aab 100644 --- a/pom.xml +++ b/pom.xml @@ -466,7 +466,7 @@ org.duracloud common - 7.1.1 + 8.0.0 org.slf4j @@ -481,7 +481,7 @@ org.duracloud storeclient - 7.1.1 + 8.0.0 org.slf4j From daf89261174600b1db106974cc941213fa0b36bd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Dec 2023 15:37:27 -0500 Subject: [PATCH 0463/1112] #10168 update integration tests --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 928574eb82b..7efd44b9533 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3291,7 +3291,8 @@ public void testArchivalStatusAPI() throws IOException { //Verify the status is empty Response nullStatus = UtilIT.getDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); - nullStatus.then().assertThat().statusCode(NO_CONTENT.getStatusCode()); + nullStatus.prettyPrint(); + nullStatus.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); //Set it Response setStatus = UtilIT.setDatasetVersionArchivalStatus(datasetId, "1.0", apiToken, "pending", @@ -3309,7 +3310,7 @@ public void testArchivalStatusAPI() throws IOException { //Make sure it's gone Response nullStatus2 = UtilIT.getDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); - nullStatus2.then().assertThat().statusCode(NO_CONTENT.getStatusCode()); + nullStatus2.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } From 2ce0fb8f083ef8dfedfb71feea0d58ff2f9c7647 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 12 Dec 2023 16:06:52 -0500 Subject: [PATCH 0464/1112] bump google.cloud.version to 0.209.0 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 7b305cad581..25d714b39ed 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -152,7 +152,7 @@ 42.6.0 9.3.0 1.12.290 - 0.177.0 + 0.209.0 8.0.0 From 349f7dbcaaaf260c00126567f9f4c6d32b0c367c Mon Sep 17 00:00:00 2001 From: sbondka Date: Wed, 13 Dec 2023 15:31:31 +0100 Subject: [PATCH 0465/1112] Add presentation link --- doc/sphinx-guides/source/admin/integrations.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index ed3860a9ca1..53a663b942e 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -193,7 +193,7 @@ This connector facilitates seamless two-way transfer of 
datasets and files, emph It is a lightweight client-side web application built using React and relying on the Dataverse External Tool feature, allowing for easy deployment on modern integration systems. Currently, it supports small to medium-sized files, with plans to enable support for large files and signed Dataverse endpoints in the future. What kind of user is the feature intended for? -The feature is intended for researchers, scientists and data analyst who are working with Dataverse instances and JupyterHub looking to ease the data transfer process. +The feature is intended for researchers, scientists and data analyst who are working with Dataverse instances and JupyterHub looking to ease the data transfer process. See `presentation `_ for details. .. _integrations-discovery: From ea644b89a3149ff8599fe3fcaa3a2bf6f5804e71 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 13 Dec 2023 14:16:47 -0500 Subject: [PATCH 0466/1112] add "message sent" success message #2638 --- src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java | 2 ++ src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/contactFormFragment.xhtml | 2 +- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java index 6be768321c4..68912969003 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java +++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.Optional; @@ -217,6 +218,7 @@ public String sendMessage() { } logger.fine("sending feedback: " + feedback); mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getCcEmail(), feedback.getSubject(), feedback.getBody()); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("contact.sent")); return null; } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 10576c0c116..0c6ce979a94 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -184,6 +184,7 @@ contact.context.file.intro={0}\n\nYou have just been sent the following message contact.context.file.ending=\n\n---\n\n{0}\n{1}\n\nGo to file {2}/file.xhtml?fileId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. contact.context.support.intro={0},\n\nThe following message was sent from {1}.\n\n---\n\n contact.context.support.ending=\n\n---\n\nMessage sent from Support contact form. +contact.sent=Message sent. # dataverseuser.xhtml account.info=Account Information diff --git a/src/main/webapp/contactFormFragment.xhtml b/src/main/webapp/contactFormFragment.xhtml index cb4eb3d0872..8950ec5acf8 100644 --- a/src/main/webapp/contactFormFragment.xhtml +++ b/src/main/webapp/contactFormFragment.xhtml @@ -81,7 +81,7 @@
    + update="@form,messagePanel" oncomplete="if (args && !args.validationFailed) PF('contactForm').hide();" actionListener="#{sendFeedbackDialog.sendMessage}">
    From 057d1b926513a4716737a4b766a8fb46e709d44e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 4 Jan 2024 09:05:21 -0500 Subject: [PATCH 0499/1112] add docker compose config to get HarvestingServerIT to pass #9275 --- docker-compose-dev.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 6f8decc0dfb..ce9f39a418a 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -19,6 +19,9 @@ services: DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test DATAVERSE_JSF_REFRESH_PERIOD: "1" + # to get HarvestingServerIT to pass + dataverse_oai_server_maxidentifiers: "2" + dataverse_oai_server_maxrecords: "2" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Ddataverse.files.file1.type=file -Ddataverse.files.file1.label=Filesystem From 37d3d41a51867758cac611215f830ad2af1d31a1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 4 Jan 2024 09:11:41 -0500 Subject: [PATCH 0500/1112] assert 500 error when invalid query params are passed #9275 --- .../harvard/iq/dataverse/api/HarvestingServerIT.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index e02964ef28f..07788eca6db 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -860,7 +860,16 @@ public void testMultiRecordOaiSet() throws InterruptedException { logger.info("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); assertEquals(200, deleteResponse.getStatusCode(), "Failed to delete the control multi-record set"); } - + + @Test + public void testInvalidQueryParams() { + // "foo" is not a valid verb + String oaiVerbPath = "/oai?foo=bar"; + Response identifyResponse = given().get(oaiVerbPath); + // TODO Why is this 500? https://github.com/IQSS/dataverse/issues/9275 + identifyResponse.then().assertThat().statusCode(500); + } + // TODO: // What else can we test? 
// Some ideas: From 2ab5ba99a357fa88f44fe72201f827cb26cff448 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 4 Jan 2024 10:50:15 -0500 Subject: [PATCH 0501/1112] #9686 update migration script --- ...gclient-id.sql => V6.1.0.1__9686-move-harvestingclient-id.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.5__9686-move-harvestingclient-id.sql => V6.1.0.1__9686-move-harvestingclient-id.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.5__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.5__9686-move-harvestingclient-id.sql rename to src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql From 27fa15458cf9d68192a3e0eed53f43371990de8e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 4 Jan 2024 16:21:16 -0500 Subject: [PATCH 0502/1112] show errors (in XML) for verb params #9275 --- .../9275-harvest-invalid-query-params.md | 4 +++ .../server/web/servlet/OAIServlet.java | 18 ++++++++-- .../iq/dataverse/api/HarvestingServerIT.java | 34 ++++++++++++++++--- 3 files changed, 48 insertions(+), 8 deletions(-) create mode 100644 doc/release-notes/9275-harvest-invalid-query-params.md diff --git a/doc/release-notes/9275-harvest-invalid-query-params.md b/doc/release-notes/9275-harvest-invalid-query-params.md new file mode 100644 index 00000000000..33d7c7bac13 --- /dev/null +++ b/doc/release-notes/9275-harvest-invalid-query-params.md @@ -0,0 +1,4 @@ +OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. + +- /oai?foo=bar will show "No argument 'verb' found" +- /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 96a19acc0e8..34152a2d8bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -31,8 +31,11 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import io.gdcc.xoai.exceptions.BadArgumentException; +import io.gdcc.xoai.exceptions.BadVerbException; import io.gdcc.xoai.exceptions.OAIException; import io.gdcc.xoai.model.oaipmh.Granularity; +import io.gdcc.xoai.model.oaipmh.verbs.Verb; import io.gdcc.xoai.services.impl.SimpleResumptionTokenFormat; import org.apache.commons.lang3.StringUtils; @@ -256,9 +259,18 @@ private void processRequest(HttpServletRequest httpServletRequest, HttpServletRe "Sorry. OAI Service is disabled on this Dataverse node."); return; } - - RawRequest rawRequest = RequestBuilder.buildRawRequest(httpServletRequest.getParameterMap()); - + + RawRequest rawRequest = null; + try { + rawRequest = RequestBuilder.buildRawRequest(httpServletRequest.getParameterMap()); + } catch (BadVerbException bve) { + // Verb.Type is required. Hard-code one. + rawRequest = new RawRequest(Verb.Type.Identify); + // Ideally, withError would accept a BadVerbException. 
+ BadArgumentException bae = new BadArgumentException(bve.getLocalizedMessage()); + rawRequest.withError(bae); + } + OAIPMH handle = dataProvider.handle(rawRequest); response.setContentType("text/xml;charset=UTF-8"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 07788eca6db..3936a240826 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -863,11 +863,35 @@ public void testMultiRecordOaiSet() throws InterruptedException { @Test public void testInvalidQueryParams() { - // "foo" is not a valid verb - String oaiVerbPath = "/oai?foo=bar"; - Response identifyResponse = given().get(oaiVerbPath); - // TODO Why is this 500? https://github.com/IQSS/dataverse/issues/9275 - identifyResponse.then().assertThat().statusCode(500); + + // The query parameter "verb" must appear. + Response noVerbArg = given().get("/oai?foo=bar"); + noVerbArg.prettyPrint(); + noVerbArg.then().assertThat() + .statusCode(OK.getStatusCode()) + // This should be "badVerb" + .body("oai.error.@code", equalTo("badArgument")) + .body("oai.error", equalTo("No argument 'verb' found")); + + // The query parameter "verb" cannot appear more than once. + Response repeated = given().get( "/oai?verb=foo&verb=bar"); + repeated.prettyPrint(); + repeated.then().assertThat() + .statusCode(OK.getStatusCode()) + // This should be "badVerb" + .body("oai.error.@code", equalTo("badArgument")) + .body("oai.error", equalTo("Verb must be singular, given: '[foo, bar]'")); + + } + + @Test + public void testNoSuchSetError() { + Response noSuchSet = given().get("/oai?verb=ListIdentifiers&set=census&metadataPrefix=dc"); + noSuchSet.prettyPrint(); + noSuchSet.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("oai.error.@code", equalTo("noRecordsMatch")) + .body("oai.error", equalTo("Requested set 'census' does not exist")); } // TODO: From 6db3e3b9c64a0163c52b3cf988669d9bfd3a919f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 4 Jan 2024 16:42:16 -0500 Subject: [PATCH 0503/1112] Fix for "latest" dataset version --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- .../impl/GetLatestAccessibleDatasetVersionCommand.java | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 094f2b88c92..83b1a4e861b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2796,7 +2796,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 96e8ee73a50..7bcc851bde2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -25,15 
+25,17 @@ public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; + private boolean checkPerms; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { - this(aRequest, anAffectedDataset, false); + this(aRequest, anAffectedDataset, false, false); } - public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPerms) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; this.includeDeaccessioned = includeDeaccessioned; + this.checkPerms = checkPerms; } @Override @@ -41,6 +43,6 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, true)); + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPerms)); } } From d017bf6843189a0228ff1be229614ba7685fcf0b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 5 Jan 2024 11:48:00 -0500 Subject: [PATCH 0504/1112] #9686 assign harvest client id to harvested files --- .../harvard/iq/dataverse/api/imports/ImportServiceBean.java | 5 +++++ .../harvest/client/HarvestingClientServiceBean.java | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index c17ba909230..c5812403f31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -332,6 +332,11 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString()); + //adding the harvesting client id to harvested files #9686 + for (DataFile df : ds.getFiles()){ + df.setHarvestedFrom(harvestingClient); + } + if (existingDs != null) { // If this dataset already exists IN ANOTHER DATAVERSE // we are just going to skip it! 
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 7ec6d75a41c..5747c64d217 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -199,8 +199,8 @@ public void recordHarvestJobStatus(Long hcId, Date finishTime, int harvestedCoun public Long getNumberOfHarvestedDatasetsByAllClients() { try { - return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d " - + " WHERE d.harvestingclient_id IS NOT NULL").getSingleResult(); + return (Long) em.createNativeQuery("SELECT count(d.id) FROM dvobject d " + + " WHERE d.harvestingclient_id IS NOT NULL and d.dtype = 'Dataset'").getSingleResult(); } catch (Exception ex) { logger.info("Warning: exception looking up the total number of harvested datasets: " + ex.getMessage()); From e085ca926274a4688faeb61f842c319ffc41b538 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 5 Jan 2024 15:27:06 -0500 Subject: [PATCH 0505/1112] Adds test to cover latest, latest published and specific scenarios. --- .../harvard/iq/dataverse/api/DatasetsIT.java | 302 +++++++++++++++--- 1 file changed, 249 insertions(+), 53 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 200cfbaf1ff..9ac05ce5704 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -70,6 +70,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.contains; @@ -613,6 +614,7 @@ public void testCreatePublishDestroyDataset() { */ @Test public void testDatasetVersionsAPI() { + // Create user String apiToken = UtilIT.createRandomUserGetToken(); @@ -650,6 +652,11 @@ public void testDatasetVersionsAPI() { .statusCode(OK.getStatusCode()) .body("data.files", equalTo(null)); + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiTokenNoPerms, excludeFiles, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + excludeFiles = false; unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, excludeFiles, false); unpublishedDraft.prettyPrint(); @@ -657,7 +664,11 @@ public void testDatasetVersionsAPI() { .statusCode(OK.getStatusCode()) .body("data.files.size()", equalTo(1)); - + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiTokenNoPerms, excludeFiles, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + // Publish collection and dataset UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); @@ -680,7 +691,8 @@ public void testDatasetVersionsAPI() { .body("data.size()", equalTo(2)) .body("data[0].files.size()", equalTo(2)) .body("data[1].files.size()", equalTo(1)); - + + // Now call this api with the new (as of 6.1) pagination parameters Integer offset = 0; Integer 
howmany = 1; @@ -690,15 +702,16 @@ public void testDatasetVersionsAPI() { versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(1)) + .body("data.versionState[0]", equalTo("DRAFT")) .body("data[0].files.size()", equalTo(2)); // And now call it with an un-privileged token, to make sure only one - // (the published) version is shown: - + // (the published) version is shown: versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); versionsResponse.prettyPrint(); versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) + .body("data.versionState[0]", not("DRAFT")) .body("data.size()", equalTo(1)); // And now call the "short", no-files version of the same api @@ -711,35 +724,98 @@ public void testDatasetVersionsAPI() { - //Set of tests on non-deaccesioned dataset - - boolean includeDeaccessioned = true; - excludeFiles = true; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); - - excludeFiles = false; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data.files.size()", equalTo(1)); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data.files.size()", equalTo(1)); + //Set of tests on non-deaccesioned dataset + String specificVersion = "1.0"; + boolean includeDeaccessioned = false; + Response datasetVersion = null; - includeDeaccessioned = false; excludeFiles = true; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). 
- then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); + //Latest published authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files", equalTo(null)); + + //Latest published unauthorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files", equalTo(null)); + + //Latest authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files", equalTo(null)); + + //Latest unauthorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files", equalTo(null)); + + //Specific version authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files", equalTo(null)); + + //Specific version unauthorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files", equalTo(null)); excludeFiles = false; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data.files.size()", equalTo(1)); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). 
- then().assertThat().statusCode(OK.getStatusCode()).body("data.files.size()", equalTo(1)); - + //Latest published authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files.size()", equalTo(1)); + + //Latest published unauthorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files.size()", equalTo(1)); + + //Latest authorized token, user is authenticated should get the Draft version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files.size()", equalTo(2)); + + //Latest unauthorized token, user has no permissions should get the latest Published version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files.size()", equalTo(1)); + + //Specific version authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files.size()", equalTo(1)); + + //Specific version unauthorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.files.size()", equalTo(1)); + //We deaccession the dataset Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -747,38 +823,158 @@ public void testDatasetVersionsAPI() { //Set of tests on deaccesioned dataset, only 3/9 should return OK message includeDeaccessioned = true; - excludeFiles = true; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(OK.getStatusCode()).body("data[0].files", equalTo(null)); excludeFiles = false; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). 
- then().assertThat().statusCode(OK.getStatusCode()).body("data.files.size()", equalTo(1));; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(404); - - includeDeaccessioned = false; - excludeFiles = true; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(404); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(404); - excludeFiles = false; - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(404); - UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned). - then().assertThat().statusCode(404); - + //Latest published authorized token with deaccessioned dataset + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files.size()", equalTo(1)); + + //Latest published requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Latest authorized token should get the DRAFT version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files.size()", equalTo(2)); + + //Latest unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Specific version authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files.size()", equalTo(1)); + + //Specific version unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets. 
+ datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + excludeFiles = true; + //Latest published exclude files authorized token with deaccessioned dataset + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files", equalTo(null)); + + //Latest published exclude files, should get the DEACCESSIONED version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files", equalTo(null)); + + //Latest authorized token should get the DRAFT version with no files + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files", equalTo(null)); + + //Latest unauthorized token excluding files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files", equalTo(null)); + + //Specific version authorized token + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files", equalTo(null)); + + //Specific version unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets. 
+ datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DEACCESSIONED")) + .body("data.files", equalTo(null)); + + //Set of test when we have a deaccessioned dataset but we don't include deaccessioned + includeDeaccessioned = false; + excludeFiles = false; + //Latest published authorized token with deaccessioned dataset not included + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Latest published unauthorized token with deaccessioned dataset not included + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Latest authorized token should get the DRAFT version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files.size()", equalTo(2)); + + //Latest unauthorized token one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Specific version authorized token, the version is DEACCESSIONED so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Specific version unauthorized token, the version is DEACCESSIONED so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); - - + excludeFiles = true; - + //Latest published authorized token with deaccessioned dataset not included + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Latest published unauthorized token with deaccessioned dataset not included + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Latest authorized token should get the DRAFT version + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.files", equalTo(null)); + + //Latest 
unauthorized token one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Specific version authorized token, the version is DEACCESSIONED so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + //Specific version unauthorized token, the version is DEACCESSIONED so shouldn't get any datasets + datasetVersion = UtilIT.getDatasetVersion(datasetPid, specificVersion, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } From 2001f5206c922062bdc4b419fe4022b2aaa33875 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 5 Jan 2024 16:06:37 -0500 Subject: [PATCH 0506/1112] quick preliminary fixes/work in progress #3437 --- .../harvest/server/OAIRecordServiceBean.java | 44 ++++++++++++------- .../harvest/server/OAISetServiceBean.java | 5 ++- 2 files changed, 32 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index 1b4a7bc7db0..56c19e004dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -55,13 +55,8 @@ public class OAIRecordServiceBean implements java.io.Serializable { EntityManager em; private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean"); - - public void updateOaiRecords(String setName, List datasetIds, Date updateTime, boolean doExport) { - updateOaiRecords(setName, datasetIds, updateTime, doExport, logger); - } - - public void updateOaiRecords(String setName, List datasetIds, Date updateTime, boolean doExport, Logger setUpdateLogger) { - + + public void updateOaiRecords(String setName, List datasetIds, Date updateTime, boolean doExport, boolean confirmed, Logger setUpdateLogger) { // create Map of OaiRecords List oaiRecords = findOaiRecordsBySetName(setName); Map recordMap = new HashMap<>(); @@ -101,9 +96,6 @@ public void updateOaiRecords(String setName, List datasetIds, Date updateT DatasetVersion releasedVersion = dataset.getReleasedVersion(); Date publicationDate = releasedVersion == null ? null : releasedVersion.getReleaseTime(); - //if (dataset.getPublicationDate() != null - // && (dataset.getLastExportTime() == null - // || dataset.getLastExportTime().before(dataset.getPublicationDate()))) { if (publicationDate != null && (dataset.getLastExportTime() == null || dataset.getLastExportTime().before(publicationDate))) { @@ -125,7 +117,9 @@ public void updateOaiRecords(String setName, List datasetIds, Date updateT } // anything left in the map should be marked as removed! 
- markOaiRecordsAsRemoved( recordMap.values(), updateTime, setUpdateLogger); + markOaiRecordsAsRemoved(recordMap.values(), updateTime, confirmed, setUpdateLogger); + + } @@ -162,7 +156,7 @@ record = new OAIRecord(setName, dataset.getGlobalId().asString(), new Date()); } } - + /* // Updates any existing OAI records for this dataset // Should be called whenever there's a change in the release status of the Dataset // (i.e., when it's published or deaccessioned), so that the timestamps and @@ -201,13 +195,31 @@ public void updateOaiRecordsForDataset(Dataset dataset) { logger.fine("Null returned - no records found."); } } +*/ - public void markOaiRecordsAsRemoved(Collection records, Date updateTime, Logger setUpdateLogger) { + public void markOaiRecordsAsRemoved(Collection records, Date updateTime, boolean confirmed, Logger setUpdateLogger) { for (OAIRecord oaiRecord : records) { if ( !oaiRecord.isRemoved() ) { - setUpdateLogger.fine("marking OAI record "+oaiRecord.getGlobalId()+" as removed"); - oaiRecord.setRemoved(true); - oaiRecord.setLastUpdateTime(updateTime); + boolean confirmedRemoved = confirmed; + if (!confirmedRemoved) { + Dataset lookedUp = datasetService.findByGlobalId(oaiRecord.getGlobalId()); + if (lookedUp == null) { + confirmedRemoved = true; + } else if (lookedUp.getLastExportTime() == null) { + confirmedRemoved = true; + } else { + boolean isReleased = lookedUp.getReleasedVersion() != null; + if (!isReleased) { + confirmedRemoved = true; + } + } + } + + if (confirmedRemoved) { + setUpdateLogger.fine("marking OAI record "+oaiRecord.getGlobalId()+" as removed"); + oaiRecord.setRemoved(true); + oaiRecord.setLastUpdateTime(updateTime); + } } else { setUpdateLogger.fine("OAI record "+oaiRecord.getGlobalId()+" is already marked as removed."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java index 2bd666401c7..9020a09abdd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java @@ -151,6 +151,8 @@ public void exportOaiSet(OAISet oaiSet, Logger exportLogger) { String query = managedSet.getDefinition(); List datasetIds; + boolean databaseLookup = false; // As opposed to a search engine lookup + try { if (!oaiSet.isDefaultSet()) { datasetIds = expandSetQuery(query); @@ -161,6 +163,7 @@ public void exportOaiSet(OAISet oaiSet, Logger exportLogger) { // including the unpublished drafts and deaccessioned ones. // Those will be filtered out further down the line. datasetIds = datasetService.findAllLocalDatasetIds(); + databaseLookup = true; } } catch (OaiSetException ose) { datasetIds = null; @@ -171,7 +174,7 @@ public void exportOaiSet(OAISet oaiSet, Logger exportLogger) { // they will be properly marked as "deleted"! -- L.A. 
4.5 //if (datasetIds != null && !datasetIds.isEmpty()) { exportLogger.info("Calling OAI Record Service to re-export " + datasetIds.size() + " datasets."); - oaiRecordService.updateOaiRecords(managedSet.getSpec(), datasetIds, new Date(), true, exportLogger); + oaiRecordService.updateOaiRecords(managedSet.getSpec(), datasetIds, new Date(), true, databaseLookup, exportLogger); //} managedSet.setUpdateInProgress(false); From 4db74b6e5ddd3cf7f2ee49b94b9b229e2746bd35 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 5 Jan 2024 16:20:27 -0500 Subject: [PATCH 0507/1112] how to write release note snippets #9264 --- .../source/developers/making-releases.rst | 10 ++-- .../source/developers/version-control.rst | 54 ++++++++++++++++--- 2 files changed, 54 insertions(+), 10 deletions(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index e73811a77e1..6b94282d55e 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -14,16 +14,18 @@ See :doc:`version-control` for background on our branching strategy. The steps below describe making both regular releases and hotfix releases. +.. _write-release-notes: + Write Release Notes ------------------- -Developers express the need for an addition to release notes by creating a file in ``/doc/release-notes`` containing the name of the issue they're working on. The name of the branch could be used for the filename with ".md" appended (release notes are written in Markdown) such as ``5053-apis-custom-homepage.md``. +Developers express the need for an addition to release notes by creating a "release note snippet" in ``/doc/release-notes`` containing the name of the issue they're working on. The name of the branch could be used for the filename with ".md" appended (release notes are written in Markdown) such as ``5053-apis-custom-homepage.md``. See :ref:`writing-release-note-snippets` for how this is described for contributors. -The task at or near release time is to collect these notes into a single doc. +The task at or near release time is to collect these snippets into a single file. - Create an issue in GitHub to track the work of creating release notes for the upcoming release. -- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the issue-specific release notes mentioned above. -- Delete the previously-created, issue-specific release notes as the content is added to the main release notes file. +- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. +- Delete the release note snippets as the content is added to the main release notes file. - Include instructions to describe the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. - Take the release notes .md through the regular Code Review and QA process. 
diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index 91f59c76e61..12f3d5b81fd 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -65,23 +65,65 @@ The example of creating a pull request below has to do with fixing an important Find or Create a GitHub Issue ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Issue is a bug (unexpected behavior) or a new feature in Dataverse, to know how to find or create an issue in dataverse please see https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md +An issue represents a bug (unexpected behavior) or a new feature in Dataverse. We'll use the issue number in the branch we create for our pull request. -For guidance on which issue to work on, please ask! with email to support@dataverse.org +Finding GitHub Issues to Work On +******************************** -Let's say you want to tackle https://github.com/IQSS/dataverse/issues/3728 which points out a typo in a page of the Dataverse Software's documentation. +Assuming this is your first contribution to Dataverse, you should start with something small. The following issue labels might be helpful in your search: + +- `good first issue `_ (these appear at https://github.com/IQSS/dataverse/contribute ) +- `hacktoberfest `_ +- `Help Wanted: Code `_ +- `Help Wanted: Documentation `_ + +For guidance on which issue to work on, please ask! :ref:`getting-help-developers` explains how to get in touch. + +Creating GitHub Issues to Work On +********************************* + +You are very welcome to create a GitHub issue to work on. However, for significant changes, please reach out (see :ref:`getting-help-developers`) to make sure the team and community agree with the proposed change. + +For small changes and especially typo fixes, please don't worry about reaching out first. + +Communicate Which Issue You Are Working On +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In the issue you can simply leave a comment to say you're working on it. If you tell us your GitHub username we are happy to add you to the "read only" team at https://github.com/orgs/IQSS/teams/dataverse-readonly/members so that we can assign the issue to you while you're working on it. You can also tell us if you'd like to be added to the `Dataverse Community Contributors spreadsheet `_. -Create a New Branch off the develop Branch +Create a New Branch Off the develop Branch ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Always create your feature branch from the latest code in develop, pulling the latest code if necessary. As mentioned above, your branch should have a name like "3728-doc-apipolicy-fix" that starts with the issue number you are addressing, and ends with a short, descriptive name. Dashes ("-") and underscores ("_") in your branch name are ok, but please try to avoid other special characters such as ampersands ("&") that have special meaning in Unix shells. +Always create your feature branch from the latest code in develop, pulling the latest code if necessary. As mentioned above, your branch should have a name like "3728-doc-apipolicy-fix" that starts with the issue number you are addressing (e.g. `#3728 `_) and ends with a short, descriptive name. Dashes ("-") and underscores ("_") in your branch name are ok, but please try to avoid other special characters such as ampersands ("&") that have special meaning in Unix shells. 
Commit Your Change to Your New Branch ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Making a commit (or several commits) to that branch, enter a description of the changes you have made. Ideally the first line of your commit message includes the number of the issue you are addressing, such as ``Fixed BlockedApiPolicy #3728``. +For each commit to that branch, try to include the issue number along with a summary in the first line of the commit message, such as ``Fixed BlockedApiPolicy #3728``. You are welcome to write longer descriptions in the body as well! + +.. _writing-release-note-snippets: + +Writing a Release Note Snippet +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We highly value your insight as a contributor when in comes to describing your work in our release notes. Not every pull request will be mentioned in release notes but most are. + +As described at :ref:`write-release-notes`, at release time we compile together release note "snippets" into the final release notes. + +Here's how to add a release note snippet to your pull request: + +- Create a Markdown file under ``doc/release-notes``. You can reuse the name of your branch and append ".md" to it, e.g. ``3728-doc-apipolicy-fix.md`` +- Edit the snippet to include anything you think should be mentioned in the release notes, such as: + + - Descriptions of new features + - Explanations of bugs fixed + - New configuration settings + - Upgrade instructions + - Etc. + +Release note snippets do not need to be long. For a new feature, a single line description might be enough. Please note that your release note will likely be edited (expanded or shortened) when the final release notes are being created. Push Your Branch to GitHub ~~~~~~~~~~~~~~~~~~~~~~~~~~ From 1ab441c718b13198f51fd9d5eb6732fc919c6a64 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 5 Jan 2024 16:36:24 -0500 Subject: [PATCH 0508/1112] cosmetic #3437 --- .../harvest/server/OAIRecordServiceBean.java | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index 56c19e004dc..902a52c7b97 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -40,10 +40,6 @@ @Stateless @Named public class OAIRecordServiceBean implements java.io.Serializable { - @EJB - OAISetServiceBean oaiSetService; - @EJB - IndexServiceBean indexService; @EJB DatasetServiceBean datasetService; @EJB @@ -55,7 +51,23 @@ public class OAIRecordServiceBean implements java.io.Serializable { EntityManager em; private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean"); - + + /** + * Updates the OAI records for the set specified + * @param setName name of the OAI set + * @param datasetIds ids of the datasets that are candidates for this OAI set + * @param updateTime time stamp + * @param doExport attempt to export datasets that haven't been exported yet + * @param confirmed true if the datasetIds above were looked up in the database + * - as opposed to in the search engine. Meaning, that it is + * confirmed that any dataset not on this list that's currently + * in the set is no longer in the database and should be + * marked as deleted without any further checks. Otherwise + * we'll want to double-check if the dataset still exists + * as published. 
This is to prevent marking existing datasets + * as deleted during a full reindex and such. + * @param setUpdateLogger dedicated Logger + */ public void updateOaiRecords(String setName, List datasetIds, Date updateTime, boolean doExport, boolean confirmed, Logger setUpdateLogger) { // create Map of OaiRecords List oaiRecords = findOaiRecordsBySetName(setName); From 826d4bdcd2d0418c8d65c8409107de0d66f6dd19 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 5 Jan 2024 17:46:26 -0500 Subject: [PATCH 0509/1112] per QA --- doc/sphinx-guides/source/developers/globus-api.rst | 1 + .../java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index de9df06a798..2f922fb1fc0 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -2,6 +2,7 @@ Globus Transfer API =================== The Globus API addresses three use cases: + * Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector) * Reference of files that will remain in a remote Globus endpoint * Transfer from a Dataverse-managed Globus endpoint diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 61884045f35..3e60441850b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -240,7 +240,7 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { MakeRequestResponse result = null; String body = "{\"DATA_TYPE\":\"mkdir\",\"path\":\"" + dir + "\"}"; try { - logger.info(body); + logger.fine(body); URL url = new URL( "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + "/mkdir"); result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", body); From dbab6ca9269a93bd7d292b37b00c42dc0fbad55f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 8 Jan 2024 10:30:25 -0500 Subject: [PATCH 0510/1112] use name@email.xyz to match citation block #2638 From datasetfieldtype.datasetContactEmail.watermark --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index b1c38e52496..ece3f070cdd 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -155,7 +155,7 @@ contact.support=Support contact.from=From contact.from.required=User email is required. contact.from.invalid=Email is invalid. -contact.from.emailPlaceholder=valid@email.org +contact.from.emailPlaceholder=name@email.xyz contact.subject=Subject contact.subject.required=Subject is required. contact.subject.selectTab.top=Select subject... From 88af3d4ed1316df681ce53fc0d4c00d03ac56e7d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 8 Jan 2024 12:16:51 -0500 Subject: [PATCH 0511/1112] clean up error handling #9275 dataProvider.handle(params) allows us to return the correct error. 
--- .../harvest/server/web/servlet/OAIServlet.java | 16 ++++++---------- .../iq/dataverse/api/HarvestingServerIT.java | 6 ++---- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 34152a2d8bd..233ca94f5fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -31,11 +31,9 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import io.gdcc.xoai.exceptions.BadArgumentException; import io.gdcc.xoai.exceptions.BadVerbException; import io.gdcc.xoai.exceptions.OAIException; import io.gdcc.xoai.model.oaipmh.Granularity; -import io.gdcc.xoai.model.oaipmh.verbs.Verb; import io.gdcc.xoai.services.impl.SimpleResumptionTokenFormat; import org.apache.commons.lang3.StringUtils; @@ -51,6 +49,7 @@ import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; +import java.util.Map; import javax.xml.stream.XMLStreamException; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; @@ -260,18 +259,15 @@ private void processRequest(HttpServletRequest httpServletRequest, HttpServletRe return; } - RawRequest rawRequest = null; + Map params = httpServletRequest.getParameterMap(); + OAIPMH handle; try { - rawRequest = RequestBuilder.buildRawRequest(httpServletRequest.getParameterMap()); + RawRequest rawRequest = RequestBuilder.buildRawRequest(params); + handle = dataProvider.handle(rawRequest); } catch (BadVerbException bve) { - // Verb.Type is required. Hard-code one. - rawRequest = new RawRequest(Verb.Type.Identify); - // Ideally, withError would accept a BadVerbException. - BadArgumentException bae = new BadArgumentException(bve.getLocalizedMessage()); - rawRequest.withError(bae); + handle = dataProvider.handle(params); } - OAIPMH handle = dataProvider.handle(rawRequest); response.setContentType("text/xml;charset=UTF-8"); try (XmlWriter xmlWriter = new XmlWriter(response.getOutputStream(), repositoryConfiguration);) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 3936a240826..45dd0c08226 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -869,8 +869,7 @@ public void testInvalidQueryParams() { noVerbArg.prettyPrint(); noVerbArg.then().assertThat() .statusCode(OK.getStatusCode()) - // This should be "badVerb" - .body("oai.error.@code", equalTo("badArgument")) + .body("oai.error.@code", equalTo("badVerb")) .body("oai.error", equalTo("No argument 'verb' found")); // The query parameter "verb" cannot appear more than once. 
@@ -878,8 +877,7 @@ public void testInvalidQueryParams() { repeated.prettyPrint(); repeated.then().assertThat() .statusCode(OK.getStatusCode()) - // This should be "badVerb" - .body("oai.error.@code", equalTo("badArgument")) + .body("oai.error.@code", equalTo("badVerb")) .body("oai.error", equalTo("Verb must be singular, given: '[foo, bar]'")); } From 2b1e5dd4bda6788f644c2737cf56310e7eaefb7d Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 8 Jan 2024 16:10:58 -0500 Subject: [PATCH 0512/1112] Extend getVersionFiles API endpoint to include the total file count --- .../iq/dataverse/api/AbstractApiBean.java | 64 +++----- .../harvard/iq/dataverse/api/Datasets.java | 146 +++++------------- .../harvard/iq/dataverse/api/DatasetsIT.java | 98 ++++++------ 3 files changed, 108 insertions(+), 200 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 58565bcc9d6..2a2843c0494 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -1,29 +1,6 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetLinkingDataverse; -import edu.harvard.iq.dataverse.DatasetLinkingServiceBean; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseLinkingDataverse; -import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.DvObjectServiceBean; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.MetadataBlockServiceBean; -import edu.harvard.iq.dataverse.PermissionServiceBean; -import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; -import edu.harvard.iq.dataverse.UserNotificationServiceBean; -import edu.harvard.iq.dataverse.UserServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -40,8 +17,8 @@ import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; -import edu.harvard.iq.dataverse.metrics.MetricsServiceBean; import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean; +import edu.harvard.iq.dataverse.metrics.MetricsServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -51,33 +28,30 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; -import 
java.io.InputStream; -import java.net.URI; -import java.util.Arrays; -import java.util.Collections; -import java.util.UUID; -import java.util.concurrent.Callable; -import java.util.logging.Level; -import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonArrayBuilder; -import jakarta.json.JsonException; -import jakarta.json.JsonObject; -import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonValue; +import jakarta.json.*; import jakarta.json.JsonValue.ValueType; import jakarta.persistence.EntityManager; import jakarta.persistence.NoResultException; import jakarta.persistence.PersistenceContext; import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.core.*; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.ResponseBuilder; import jakarta.ws.rs.core.Response.Status; +import java.io.InputStream; +import java.net.URI; +import java.util.Arrays; +import java.util.Collections; +import java.util.UUID; +import java.util.concurrent.Callable; +import java.util.logging.Level; +import java.util.logging.Logger; + import static org.apache.commons.lang3.StringUtils.isNumeric; /** @@ -661,7 +635,13 @@ protected Response ok( JsonArrayBuilder bld ) { .add("data", bld).build()) .type(MediaType.APPLICATION_JSON).build(); } - + protected Response ok( JsonArrayBuilder bld , long totalCount) { + return Response.ok(Json.createObjectBuilder() + .add("status", ApiConstants.STATUS_OK) + .add("total_count", totalCount) + .add("data", bld).build()) + .type(MediaType.APPLICATION_JSON).build(); + } protected Response ok( JsonArray ja ) { return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 094f2b88c92..56b9e8df319 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,9 +1,11 @@ package edu.harvard.iq.dataverse.api; +import com.amazonaws.services.s3.model.PartETag; import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; @@ -13,6 +15,7 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; +import edu.harvard.iq.dataverse.dataaccess.*; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; @@ -23,92 +26,47 @@ import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; -import 
edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand; -import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand; -import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; -import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand; -import edu.harvard.iq.dataverse.engine.command.impl.MoveDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult; -import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand; -import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand; -import edu.harvard.iq.dataverse.engine.command.impl.SetCurationStatusCommand; -import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException; +import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.globus.GlobusUtil; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; -import edu.harvard.iq.dataverse.privateurl.PrivateUrl; -import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; -import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import 
edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; -import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; -import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException; -import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetStorageSizeCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand; -import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitations; -import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitationsServiceBean; -import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics; -import edu.harvard.iq.dataverse.makedatacount.DatasetMetricsServiceBean; -import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; +import edu.harvard.iq.dataverse.makedatacount.*; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import edu.harvard.iq.dataverse.metrics.MetricsUtil; -import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil; +import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; +import edu.harvard.iq.dataverse.search.IndexServiceBean; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.ArchiverUtil; -import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.EjbUtil; -import edu.harvard.iq.dataverse.util.FileUtil; -import edu.harvard.iq.dataverse.util.MarkupChecker; -import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.URLTokenUtil; +import edu.harvard.iq.dataverse.util.*; import edu.harvard.iq.dataverse.util.bagit.OREMap; -import edu.harvard.iq.dataverse.util.json.JSONLDUtil; -import edu.harvard.iq.dataverse.util.json.JsonLDTerm; -import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.json.JsonUtil; -import edu.harvard.iq.dataverse.util.SignpostingResources; -import edu.harvard.iq.dataverse.search.IndexServiceBean; -import static edu.harvard.iq.dataverse.api.ApiConstants.*; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; -import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; -import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import edu.harvard.iq.dataverse.util.json.*; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext; -import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; -import edu.harvard.iq.dataverse.globus.GlobusServiceBean; -import edu.harvard.iq.dataverse.globus.GlobusUtil; +import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.json.*; +import jakarta.json.stream.JsonParsingException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.*; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.*; +import jakarta.ws.rs.core.Response.Status; +import 
org.apache.commons.lang3.StringUtils; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + import java.io.IOException; import java.io.InputStream; import java.net.URI; @@ -117,45 +75,21 @@ import java.text.SimpleDateFormat; import java.time.LocalDate; import java.time.LocalDateTime; -import java.util.*; -import java.util.concurrent.*; -import java.util.function.Predicate; import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.util.*; import java.util.Map.Entry; +import java.util.concurrent.ExecutionException; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; import java.util.stream.Collectors; -import jakarta.ejb.EJB; -import jakarta.ejb.EJBException; -import jakarta.inject.Inject; -import jakarta.json.*; -import jakarta.json.stream.JsonParsingException; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.NotAcceptableException; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.core.*; -import jakarta.ws.rs.core.Response.Status; + +import static edu.harvard.iq.dataverse.api.ApiConstants.*; +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; -import org.apache.commons.lang3.StringUtils; -import org.glassfish.jersey.media.multipart.FormDataBodyPart; -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; -import org.glassfish.jersey.media.multipart.FormDataParam; -import com.amazonaws.services.s3.model.PartETag; -import edu.harvard.iq.dataverse.settings.JvmSettings; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -546,7 +480,9 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); + // TODO: should we count the total every time or only when offset = 0? 
+ return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)), + datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); }, getRequestUser(crc)); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 200cfbaf1ff..ace69a6c606 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,77 +1,66 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import edu.harvard.iq.dataverse.FileSearchCriteria; -import io.restassured.RestAssured; -import static edu.harvard.iq.dataverse.DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH; -import static edu.harvard.iq.dataverse.api.ApiConstants.*; -import static io.restassured.RestAssured.given; -import io.restassured.path.json.JsonPath; -import io.restassured.http.ContentType; -import io.restassured.response.Response; -import java.time.LocalDate; -import java.time.format.DateTimeFormatter; -import java.util.*; -import java.util.logging.Logger; -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.skyscreamer.jsonassert.JSONAssert; -import org.junit.jupiter.api.Disabled; -import jakarta.json.JsonObject; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; -import static jakarta.ws.rs.core.Response.Status.OK; -import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; -import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; -import static jakarta.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED; -import static jakarta.ws.rs.core.Response.Status.CONFLICT; -import static jakarta.ws.rs.core.Response.Status.NO_CONTENT; -import edu.harvard.iq.dataverse.DataFile; -import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER; import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO; import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIOTest; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; -import io.restassured.parsing.Parser; -import static io.restassured.path.json.JsonPath.with; -import io.restassured.path.xml.XmlPath; -import static edu.harvard.iq.dataverse.api.UtilIT.equalToCI; -import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers; import edu.harvard.iq.dataverse.datavariable.VarGroup; import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; 
-import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.StringReader; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.Files; +import io.restassured.RestAssured; +import io.restassured.http.ContentType; +import io.restassured.parsing.Parser; +import io.restassured.path.json.JsonPath; +import io.restassured.path.xml.XmlPath; +import io.restassured.response.Response; import jakarta.json.Json; import jakarta.json.JsonArray; +import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; import jakarta.ws.rs.core.Response.Status; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.hamcrest.CoreMatchers; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; + import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringReader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.util.logging.Logger; + +import static edu.harvard.iq.dataverse.DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; +import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER; +import static edu.harvard.iq.dataverse.api.UtilIT.equalToCI; +import static io.restassured.RestAssured.given; +import static io.restassured.path.json.JsonPath.with; +import static jakarta.ws.rs.core.Response.Status.*; import static java.lang.Thread.sleep; -import org.hamcrest.CoreMatchers; -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.hasItems; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.*; @@ -3548,7 +3537,9 @@ public void getVersionFiles() throws IOException, InterruptedException { getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) - .body("data[1].label", equalTo(testFileName2)); + .body("data[1].label", equalTo(testFileName2)) + .body("total_count", equalTo(5)); + String x = getVersionFilesResponsePaginated.prettyPrint(); int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); @@ -3562,7 +3553,8 @@ public void getVersionFiles() throws IOException, InterruptedException { getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) - .body("data[1].label", equalTo(testFileName4)); + .body("data[1].label", equalTo(testFileName4)) + .body("total_count", equalTo(5)); fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); From 0807b1fd64b076ef92029a16b1c3a946802c56b7 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 8 Jan 2024 16:18:55 
-0500 Subject: [PATCH 0513/1112] fix format --- src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 2a2843c0494..419132f7ba7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -635,6 +635,7 @@ protected Response ok( JsonArrayBuilder bld ) { .add("data", bld).build()) .type(MediaType.APPLICATION_JSON).build(); } + protected Response ok( JsonArrayBuilder bld , long totalCount) { return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) @@ -642,6 +643,7 @@ protected Response ok( JsonArrayBuilder bld , long totalCount) { .add("data", bld).build()) .type(MediaType.APPLICATION_JSON).build(); } + protected Response ok( JsonArray ja ) { return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) From 53e525d7ddddcc4fd055f45debc126f8b2340ffc Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 8 Jan 2024 16:24:21 -0500 Subject: [PATCH 0514/1112] fix format --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ace69a6c606..91aa33f6b1f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3539,7 +3539,6 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)) .body("total_count", equalTo(5)); - String x = getVersionFilesResponsePaginated.prettyPrint(); int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); From b8a79a1d8a6240a8997abf2fe1332140a4bff62b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 8 Jan 2024 21:08:53 -0500 Subject: [PATCH 0515/1112] adds a simple api for clearing a single dataset from Solr. I want to have it in order to be able to create an api test for a specific OAI set export case. But I figure it could be useful otherwise. #3437 --- .../edu/harvard/iq/dataverse/api/Index.java | 25 ++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index 4910c460b6a..2516c05e634 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -215,7 +215,7 @@ public Response clearSolrIndex() { return error(Status.INTERNAL_SERVER_ERROR, ex.getLocalizedMessage()); } } - + @GET @Path("{type}/{id}") public Response indexTypeById(@PathParam("type") String type, @PathParam("id") Long id) { @@ -326,6 +326,29 @@ public Response indexDatasetByPersistentId(@QueryParam("persistentId") String pe } } + /** + * Clears the entry for a dataset from Solr + * + * @param id number id of the dataset + * @return response; + * will return 404 if no such dataset in the database; but will attempt to + * clear the entry from Solr regardless. 
+ */ + @DELETE + @Path("datasets/clear/{id}") + public Response clearDatasetFromIndex(@PathParam("id") Long id) { + Dataset dataset = datasetService.find(id); + // We'll attempt to delete the Solr document regardless of whether the + // dataset exists in the database: + String response = indexService.removeSolrDocFromIndex(IndexServiceBean.solrDocIdentifierDataset + id); + if (dataset != null) { + return ok("Sent request to clear Solr document for dataset " + id + ": " + response); + } else { + return notFound("Could not find dataset " + id + " in the database. Requested to clear from Solr anyway: " + response); + } + } + + /** * This is just a demo of the modular math logic we use for indexAll. */ From 622a676681a336fd78e89d1f6d21e3e703eb7d7a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Jan 2024 10:32:12 -0500 Subject: [PATCH 0516/1112] updated per review comments --- ...-extend-getVersionFiles-api-to-include-total-file-count.md | 2 ++ doc/sphinx-guides/source/api/native-api.rst | 4 +++- .../java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 2 +- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 1 - src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 5 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md diff --git a/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md b/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md new file mode 100644 index 00000000000..80a71e9bb7e --- /dev/null +++ b/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md @@ -0,0 +1,2 @@ +The response for getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) endpoint has been modified to include a total count of records available (totalCount:x). +This will aid in pagination by allowing the caller to know how many pages can be iterated through. The existing API (getVersionFileCounts) to return the count will still be available. \ No newline at end of file diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6591c983824..48fc16bf141 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1066,7 +1066,9 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" -This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters: +This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters. +To aid in pagination the Json response also includes the total number of rows (totalCount) available. +Usage example: .. 
code-block:: bash diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 419132f7ba7..bc94d7f0bcc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -639,7 +639,7 @@ protected Response ok( JsonArrayBuilder bld ) { protected Response ok( JsonArrayBuilder bld , long totalCount) { return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) - .add("total_count", totalCount) + .add("totalCount", totalCount) .add("data", bld).build()) .type(MediaType.APPLICATION_JSON).build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 56b9e8df319..3a2497d9418 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -480,7 +480,6 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } - // TODO: should we count the total every time or only when offset = 0? return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)), datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); }, getRequestUser(crc)); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 91aa33f6b1f..5753550d564 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3538,7 +3538,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)) - .body("total_count", equalTo(5)); + .body("totalCount", equalTo(5)); int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); @@ -3553,7 +3553,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)) - .body("total_count", equalTo(5)); + .body("totalCount", equalTo(5)); fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); From 4b6fb504873b6864060f0e15f1a6609b3f05a2d0 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 Jan 2024 10:32:50 -0500 Subject: [PATCH 0517/1112] cosmetic #3437 --- src/main/java/edu/harvard/iq/dataverse/api/Index.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index 2516c05e634..a55ddad0fa0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -335,7 +335,7 @@ public Response indexDatasetByPersistentId(@QueryParam("persistentId") String pe * clear the entry from Solr regardless. 
*/ @DELETE - @Path("datasets/clear/{id}") + @Path("datasets/{id}/clear") public Response clearDatasetFromIndex(@PathParam("id") Long id) { Dataset dataset = datasetService.find(id); // We'll attempt to delete the Solr document regardless of whether the From 291811e3e3c6f0f8c54dcd6b980444259e247d70 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 9 Jan 2024 11:42:34 -0500 Subject: [PATCH 0518/1112] #9686 add migration to harvested files --- .../migration/V6.1.0.1__9686-move-harvestingclient-id.sql | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql index 22142b8fc41..67ba026745f 100644 --- a/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql +++ b/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql @@ -1,8 +1,14 @@ ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS harvestingclient_id BIGINT; +--add harvesting client id to dvobject records of harvested datasets update dvobject dvo set harvestingclient_id = s.harvestingclient_id from (select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s where s.id = dvo.id; +--add harvesting client id to dvobject records of harvested files +update dvobject dvo set harvestingclient_id = s.harvestingclient_id from +(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s +where s.id = dvo.owner_id; + ALTER TABLE dataset drop COLUMN IF EXISTS harvestingclient_id; From c6ec7faefb1101a29c1e89e4f40a9085c3234e93 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 Jan 2024 12:10:59 -0500 Subject: [PATCH 0519/1112] api tests (work in progress) #3437 --- .../iq/dataverse/api/HarvestingServerIT.java | 50 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 ++- 2 files changed, 54 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index e02964ef28f..629e72aec06 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -23,6 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -39,6 +40,7 @@ public class HarvestingServerIT { private static String adminUserAPIKey; private static String singleSetDatasetIdentifier; private static String singleSetDatasetPersistentId; + private static Integer singleSetDatasetDatabaseId; private static List extraDatasetsIdentifiers = new ArrayList<>(); @BeforeAll @@ -84,7 +86,7 @@ private static void setupDatasets() { // create dataset: Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminUserAPIKey); createDatasetResponse.prettyPrint(); - Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + singleSetDatasetDatabaseId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); // retrieve the global id: singleSetDatasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); @@ -110,7 +112,7 @@ private static void setupDatasets() { // create dataset: createDatasetResponse = 
UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminUserAPIKey); createDatasetResponse.prettyPrint(); - datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); // retrieve the global id: String thisDatasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); @@ -395,6 +397,11 @@ public void testSetEditAPIandOAIlistSets() { // OAI set with a single dataset, and attempt to retrieve // it and validate the OAI server responses of the corresponding // ListIdentifiers, ListRecords and GetRecord methods. + // Finally, we will make sure that the test reexport survives + // a reexport when the control dataset is dropped from the search + // index temporarily (if, for example, the site admin cleared their + // solr index in order to reindex everything from scratch - which + // can take a while on a large database). This is per #3437 @Test public void testSingleRecordOaiSet() throws InterruptedException { // Let's try and create an OAI set with the "single set dataset" that @@ -549,7 +556,46 @@ public void testSingleRecordOaiSet() throws InterruptedException { assertEquals("Medicine, Health and Life Sciences", responseXmlPath.getString("OAI-PMH.GetRecord.record.metadata.dc.subject")); // ok, looks legit! + + // Now, let's clear this dataset from Solr: + Response solrClearResponse = UtilIT.indexClearDataset(singleSetDatasetDatabaseId); + assertEquals(200, solrClearResponse.getStatusCode()); + + // Now, let's re-export the set. The search query that defines the set + // will no longer find it (todo: confirm this first?). However, since + // the dataset still exists in the database; and would in real life + // be reindexed again, we don't want to mark the OAI record for the + // dataset as "deleted" just yet. (this is a new feature, as of 6.2) + // So, let's re-export the set... + + exportSetResponse = UtilIT.exportOaiSet(setName); + assertEquals(200, exportSetResponse.getStatusCode()); + Thread.sleep(10000L); // wait for just a second, to be safe + + // OAI Test 5. Check ListIdentifiers again: + + Response listIdentifiersResponse = UtilIT.getOaiListIdentifiers(setName, "oai_dc"); + assertEquals(OK.getStatusCode(), listIdentifiersResponse.getStatusCode()); + // Validate the service section of the OAI response: + responseXmlPath = validateOaiVerbResponse(listIdentifiersResponse, "ListIdentifiers"); + + // ... and confirm that the record for our dataset is still listed + // as active: + List ret = responseXmlPath.getList("OAI-PMH.ListIdentifiers.header"); + + assertEquals(1, ret.size()); + assertEquals(singleSetDatasetPersistentId, responseXmlPath + .getString("OAI-PMH.ListIdentifiers.header.identifier")); + assertEquals(setName, responseXmlPath + .getString("OAI-PMH.ListIdentifiers.header.setSpec")); + // ... and, most importantly, make sure the record does not have a + // `status="deleted"` attribute: + assertNull(responseXmlPath.getString("OAI-PMH.ListIdentifiers.header.@status")); + + // TODO: (?) we could also destroy the dataset for real now, and make + // sure the "deleted" attribute has been added to the OAI record. 
+ } // This test will attempt to create a set with multiple records (enough diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index b6dfc697f3c..cbc2f974fec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1494,6 +1494,11 @@ static Response reindexDataset(String persistentId) { return response; } + static Response indexClearDataset(Integer datasetId) { + return given() + .delete("/api/admin/index/datasets/"+datasetId+"/clear"); + } + static Response reindexDataverse(String dvId) { Response response = given() .get("/api/admin/index/dataverses/" + dvId); @@ -2066,7 +2071,7 @@ static Response indexClear() { return given() .get("/api/admin/index/clear"); } - + static Response index() { return given() .get("/api/admin/index"); From dfb1795e1318d058c4b614894ce9cd1039da38d3 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Tue, 9 Jan 2024 17:37:06 +0000 Subject: [PATCH 0520/1112] Added: minor docs formatting tweaks --- doc/sphinx-guides/source/api/native-api.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 48fc16bf141..09fc3c69693 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1067,7 +1067,9 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters. -To aid in pagination the Json response also includes the total number of rows (totalCount) available. + +To aid in pagination the JSON response also includes the total number of rows (totalCount) available. + Usage example: .. code-block:: bash From 03f4a06b5ed163d9252e6e868fa2e939fda0a2e0 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 9 Jan 2024 13:30:34 -0500 Subject: [PATCH 0521/1112] #9686 add a release note --- doc/release-notes/9686-move-harvesting-client-id.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9686-move-harvesting-client-id.md diff --git a/doc/release-notes/9686-move-harvesting-client-id.md b/doc/release-notes/9686-move-harvesting-client-id.md new file mode 100644 index 00000000000..110fcc6ca6e --- /dev/null +++ b/doc/release-notes/9686-move-harvesting-client-id.md @@ -0,0 +1 @@ +With this release the harvesting client id will be available for harvested files. A database update will copy the id to previously harvested files./ From 7c920676611ecabd932067e8124b2df2e166a18b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 Jan 2024 13:56:59 -0500 Subject: [PATCH 0522/1112] API test finished. 
#3437 --- .../harvest/server/OAIRecordServiceBean.java | 2 +- .../iq/dataverse/api/HarvestingServerIT.java | 42 ++++++++++++++++++- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index 902a52c7b97..cc15d4c978b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -65,7 +65,7 @@ public class OAIRecordServiceBean implements java.io.Serializable { * marked as deleted without any further checks. Otherwise * we'll want to double-check if the dataset still exists * as published. This is to prevent marking existing datasets - * as deleted during a full reindex and such. + * as deleted during a full reindex etc. * @param setUpdateLogger dedicated Logger */ public void updateOaiRecords(String setName, List datasetIds, Date updateTime, boolean doExport, boolean confirmed, Logger setUpdateLogger) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 629e72aec06..f076f819f6f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -560,6 +560,7 @@ public void testSingleRecordOaiSet() throws InterruptedException { // Now, let's clear this dataset from Solr: Response solrClearResponse = UtilIT.indexClearDataset(singleSetDatasetDatabaseId); assertEquals(200, solrClearResponse.getStatusCode()); + solrClearResponse.prettyPrint(); // Now, let's re-export the set. The search query that defines the set // will no longer find it (todo: confirm this first?). However, since @@ -570,7 +571,7 @@ public void testSingleRecordOaiSet() throws InterruptedException { exportSetResponse = UtilIT.exportOaiSet(setName); assertEquals(200, exportSetResponse.getStatusCode()); - Thread.sleep(10000L); // wait for just a second, to be safe + Thread.sleep(1000L); // wait for just a second, to be safe // OAI Test 5. Check ListIdentifiers again: @@ -596,6 +597,43 @@ public void testSingleRecordOaiSet() throws InterruptedException { // TODO: (?) we could also destroy the dataset for real now, and make // sure the "deleted" attribute has been added to the OAI record. + // While we are at it, let's now destroy this dataset for real, and + // make sure the "deleted" attribute is actually added once the set + // is re-exported: + + Response destroyDatasetResponse = UtilIT.destroyDataset(singleSetDatasetPersistentId, adminUserAPIKey); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + destroyDatasetResponse.prettyPrint(); + + // Confirm that it no longer exists: + Response datasetNotFoundResponse = UtilIT.nativeGet(singleSetDatasetDatabaseId, adminUserAPIKey); + assertEquals(404, datasetNotFoundResponse.getStatusCode()); + + // Repeat the whole production with re-exporting set and checking + // ListIdentifiers: + + exportSetResponse = UtilIT.exportOaiSet(setName); + assertEquals(200, exportSetResponse.getStatusCode()); + Thread.sleep(1000L); // wait for just a second, to be safe + System.out.println("re-exported the dataset again, with the control dataset destroyed"); + + // OAI Test 6. 
Check ListIdentifiers again: + + listIdentifiersResponse = UtilIT.getOaiListIdentifiers(setName, "oai_dc"); + assertEquals(OK.getStatusCode(), listIdentifiersResponse.getStatusCode()); + + // Validate the service section of the OAI response: + responseXmlPath = validateOaiVerbResponse(listIdentifiersResponse, "ListIdentifiers"); + + // ... and confirm that the record for our dataset is still listed... + ret = responseXmlPath.getList("OAI-PMH.ListIdentifiers.header"); + assertEquals(1, ret.size()); + assertEquals(singleSetDatasetPersistentId, responseXmlPath + .getString("OAI-PMH.ListIdentifiers.header.identifier")); + + // ... BUT, it should be marked as "deleted" now: + assertEquals(responseXmlPath.getString("OAI-PMH.ListIdentifiers.header.@status"), "deleted"); + } // This test will attempt to create a set with multiple records (enough @@ -910,7 +948,7 @@ public void testMultiRecordOaiSet() throws InterruptedException { // TODO: // What else can we test? // Some ideas: - // - Test handling of deleted dataset records + // - Test handling of deleted dataset records - DONE! // - Test "from" and "until" time parameters // - Validate full verb response records against XML schema // (for each supported metadata format, possibly?) From 37b9a8cb0e24b9b83780c9c1ab7c0bf32272b0d2 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 Jan 2024 14:23:54 -0500 Subject: [PATCH 0523/1112] Added a guide entry for the new api call/renamed the call itself #3437 --- .../source/admin/solr-search-index.rst | 14 ++++++++++++-- .../java/edu/harvard/iq/dataverse/api/Index.java | 2 +- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/admin/solr-search-index.rst b/doc/sphinx-guides/source/admin/solr-search-index.rst index e6f7b588ede..4c71ef9d4a8 100644 --- a/doc/sphinx-guides/source/admin/solr-search-index.rst +++ b/doc/sphinx-guides/source/admin/solr-search-index.rst @@ -26,8 +26,8 @@ Remove all Solr documents that are orphaned (i.e. not associated with objects in ``curl http://localhost:8080/api/admin/index/clear-orphans`` -Clearing Data from Solr -~~~~~~~~~~~~~~~~~~~~~~~ +Clearing ALL Data from Solr +~~~~~~~~~~~~~~~~~~~~~~~~~~~ Please note that the moment you issue this command, it will appear to end users looking at the root Dataverse installation page that all data is gone! This is because the root Dataverse installation page is powered by the search index. @@ -86,6 +86,16 @@ To re-index a dataset by its database ID: ``curl http://localhost:8080/api/admin/index/datasets/7504557`` +Clearing a Dataset from Solr +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API will clear the Solr entry for the dataset specified. It can be useful if you have reasons to stop showing a published dataset in search results and/or on Collection pages, but don't want to destroy and purge it from the database just yet. + +``curl -X DELETE http://localhost:8080/api/admin/index/datasets/`` + +This can be reversed of course by re-indexing the dataset with the API above. + + Manually Querying Solr ---------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index a55ddad0fa0..c30a77acb58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -335,7 +335,7 @@ public Response indexDatasetByPersistentId(@QueryParam("persistentId") String pe * clear the entry from Solr regardless. 
*/ @DELETE - @Path("datasets/{id}/clear") + @Path("datasets/{id}") public Response clearDatasetFromIndex(@PathParam("id") Long id) { Dataset dataset = datasetService.find(id); // We'll attempt to delete the Solr document regardless of whether the diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index cbc2f974fec..49da0445e52 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1496,7 +1496,7 @@ static Response reindexDataset(String persistentId) { static Response indexClearDataset(Integer datasetId) { return given() - .delete("/api/admin/index/datasets/"+datasetId+"/clear"); + .delete("/api/admin/index/datasets/"+datasetId); } static Response reindexDataverse(String dvId) { From b9bcf995b42889af3333368b3264f49264df52ef Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Tue, 9 Jan 2024 14:58:32 -0500 Subject: [PATCH 0524/1112] Update Kanban Board URL The URL was pointing to the old board. --- doc/sphinx-guides/source/developers/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index a01a8066897..f446b73de09 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -40,7 +40,7 @@ For the Dataverse Software development roadmap, please see https://www.iq.harvar Kanban Board ------------ -You can get a sense of what's currently in flight (in dev, in QA, etc.) by looking at https://github.com/orgs/IQSS/projects/2 +You can get a sense of what's currently in flight (in dev, in QA, etc.) by looking at https://github.com/orgs/IQSS/projects/34 Issue Tracker ------------- From 94570f0c670e6d39594c5cfb9ca5233962834de0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 10 Jan 2024 10:59:21 -0500 Subject: [PATCH 0525/1112] add toc to docs #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 2f922fb1fc0..b5d420467aa 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -1,6 +1,9 @@ Globus Transfer API =================== +.. contents:: |toctitle| + :local: + The Globus API addresses three use cases: * Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector) From 67292840e9b6e2f701fd6bc0e09522b0b2d0ef07 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 10 Jan 2024 13:16:27 -0500 Subject: [PATCH 0526/1112] Add comments and makes the loop easier to understand. 
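For context, a hypothetical caller-side sketch (not taken from the codebase) of how the two flags handled in the diff below are meant to combine; "engine.submit(...)" just stands in for however the surrounding code actually executes commands:

    DatasetVersion dsv = engine.submit(
            new GetLatestPublishedDatasetVersionCommand(
                    req,      // DataverseRequest of the calling user
                    dataset,
                    true,     // includeDeaccessioned: a deaccessioned version may be returned
                    true));   // checkPermsWhenDeaccessioned: files were requested, so a
                              // deaccessioned version is only returned when the user has
                              // the EditDataset permission
    if (dsv == null) {
        // either no released/deaccessioned version exists, or the caller may not
        // see the files of a deaccessioned version
    }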
--- ...tLatestPublishedDatasetVersionCommand.java | 44 +++++++++++++------ 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index a4952bbf524..dd9a8112afe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -17,33 +17,51 @@ public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; - private boolean checkPerms; + private boolean checkPermsWhenDeaccessioned; public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { this(aRequest, anAffectedDataset, false, false); } - public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPerms) { + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; this.includeDeaccessioned = includeDeaccessioned; - this.checkPerms = checkPerms; + this.checkPermsWhenDeaccessioned = checkPermsWhenDeaccessioned; } + /* + * This command depending on the requested parameters will return: + * + * If the user requested to include a deaccessioned dataset with the files, the command will return the deaccessioned version if the user has permissions to view the files. Otherwise, it will return null. + * If the user requested to include a deaccessioned dataset but did not request the files, the command will return the deaccessioned version. + * If the user did not request to include a deaccessioned dataset, the command will return the latest published version. + * + */ @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { - - if(dsv.isDeaccessioned() && checkPerms){ - if(!ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset)){ - return null; - } - } - return dsv; + DatasetVersion dsv = null; + + //We search for a released or deaccessioned version if it is requested. + for (DatasetVersion next : ds.getVersions()) { + if (next.isReleased() || (includeDeaccessioned && next.isDeaccessioned())){ + dsv = next; + break; + } + } + + //Checking permissions if the deaccessionedVersion was found and we are checking permissions because files were requested. + if(dsv != null && (dsv.isDeaccessioned() && checkPermsWhenDeaccessioned)){ + //If the user has no permissions we return null + if(!ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset)){ + dsv = null; } } - return null; + + return dsv; } + + + } From 7e30c4ae14d2b4af5d80e9722192ec0a2680bcd9 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 10 Jan 2024 14:32:34 -0500 Subject: [PATCH 0527/1112] fixes the API tests. 
#3437 --- .../iq/dataverse/api/HarvestingServerIT.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index f076f819f6f..cca571efee0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -106,9 +106,9 @@ private static void setupDatasets() { // So wait for all of this to finish. UtilIT.sleepForReexport(singleSetDatasetPersistentId, adminUserAPIKey, 10); - // ... And let's create 4 more datasets for a multi-dataset experiment: + // ... And let's create 5 more datasets for a multi-dataset experiment: - for (int i = 0; i < 4; i++) { + for (int i = 0; i < 5; i++) { // create dataset: createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminUserAPIKey); createDatasetResponse.prettyPrint(); @@ -653,9 +653,13 @@ public void testMultiRecordOaiSet() throws InterruptedException { // in the class init: String setName = UtilIT.getRandomString(6); - String setQuery = "(dsPersistentId:" + singleSetDatasetIdentifier; + String setQuery = ""; for (String persistentId : extraDatasetsIdentifiers) { - setQuery = setQuery.concat(" OR dsPersistentId:" + persistentId); + if (setQuery.equals("")) { + setQuery = "(dsPersistentId:" + persistentId; + } else { + setQuery = setQuery.concat(" OR dsPersistentId:" + persistentId); + } } setQuery = setQuery.concat(")"); @@ -796,7 +800,6 @@ public void testMultiRecordOaiSet() throws InterruptedException { boolean allDatasetsListed = true; - allDatasetsListed = persistentIdsInListIdentifiers.contains(singleSetDatasetIdentifier); for (String persistentId : extraDatasetsIdentifiers) { allDatasetsListed = allDatasetsListed && persistentIdsInListIdentifiers.contains(persistentId); } @@ -921,12 +924,11 @@ public void testMultiRecordOaiSet() throws InterruptedException { // Record the last identifier listed on this final page: persistentIdsInListRecords.add(ret.get(0).substring(ret.get(0).lastIndexOf('/') + 1)); - // Finally, let's confirm that the expected 5 datasets have been listed + // Finally, let's confirm again that the expected 5 datasets have been listed // as part of this Set: allDatasetsListed = true; - allDatasetsListed = persistentIdsInListRecords.contains(singleSetDatasetIdentifier); for (String persistentId : extraDatasetsIdentifiers) { allDatasetsListed = allDatasetsListed && persistentIdsInListRecords.contains(persistentId); } From 9d18da511af71dd4daeb1f76c330c5a25dbcca23 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 11 Jan 2024 11:01:08 +0000 Subject: [PATCH 0528/1112] Added: displayOrder and isRequired fields to DatasetFieldType payload --- .../harvard/iq/dataverse/util/json/JsonPrinter.java | 2 ++ .../edu/harvard/iq/dataverse/api/MetadataBlocksIT.java | 10 ++++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index cfc266f2ba7..a97ef9c12d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -570,6 +570,8 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { fieldsBld.add("multiple", fld.isAllowMultiples()); fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary()); 
fieldsBld.add("displayFormat", fld.getDisplayFormat()); + fieldsBld.add("isRequired", fld.isRequired()); + fieldsBld.add("displayOrder", fld.getDisplayOrder()); if (fld.isControlledVocabulary()) { // If the field has a controlled vocabulary, // add all values to the resulting JSON diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index c301e158b4e..f1c3a9815f1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -25,7 +25,9 @@ void testGetCitationBlock() { getCitationBlock.prettyPrint(); getCitationBlock.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")); + .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")) + .body("data.fields.title.displayOrder", CoreMatchers.is(0)) + .body("data.fields.title.isRequired", CoreMatchers.is(true)); } @Test @@ -37,18 +39,18 @@ void testDatasetWithAllDefaultMetadata() { ", response=" + createUser.prettyPrint()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); assumeFalse(apiToken == null || apiToken.isBlank()); - + Response createCollection = UtilIT.createRandomDataverse(apiToken); assumeTrue(createCollection.statusCode() < 300, "code=" + createCollection.statusCode() + ", response=" + createCollection.prettyPrint()); String dataverseAlias = UtilIT.getAliasFromResponse(createCollection); assumeFalse(dataverseAlias == null || dataverseAlias.isBlank()); - + // when String pathToJsonFile = "scripts/api/data/dataset-create-new-all-default-fields.json"; Response createDataset = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); - + // then assertEquals(CREATED.getStatusCode(), createDataset.statusCode(), "code=" + createDataset.statusCode() + From e8054138219ffc499c756ee9d77bdb77d7450a23 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 11 Jan 2024 11:06:16 +0000 Subject: [PATCH 0529/1112] Added: release notes for #10216 --- doc/release-notes/10216-metadatablocks.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 doc/release-notes/10216-metadatablocks.md diff --git a/doc/release-notes/10216-metadatablocks.md b/doc/release-notes/10216-metadatablocks.md new file mode 100644 index 00000000000..8fbd4f37e14 --- /dev/null +++ b/doc/release-notes/10216-metadatablocks.md @@ -0,0 +1,4 @@ +The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: + +- `isRequired` - Wether or not this field is required +- `displayOrder`: The display order of the field in create/edit forms From a833e168d8d4df499feca9796f38fbb186581c8b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 Jan 2024 09:46:24 -0500 Subject: [PATCH 0530/1112] minor doc changes #3437 --- doc/release-notes/3437-new-index-api-added.md | 4 ++++ doc/sphinx-guides/source/admin/solr-search-index.rst | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 doc/release-notes/3437-new-index-api-added.md diff --git a/doc/release-notes/3437-new-index-api-added.md b/doc/release-notes/3437-new-index-api-added.md new file mode 100644 index 00000000000..2f40c65073f --- /dev/null +++ b/doc/release-notes/3437-new-index-api-added.md @@ -0,0 +1,4 @@ +(this API was added as a side feature of the pr #10222. 
the main point of the pr was an improvement in the OAI set housekeeping logic, I believe it's too obscure a part of the system to warrant a release note by itself. but the new API below needs to be announced). + +A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. + diff --git a/doc/sphinx-guides/source/admin/solr-search-index.rst b/doc/sphinx-guides/source/admin/solr-search-index.rst index 4c71ef9d4a8..3f7b9d5b547 100644 --- a/doc/sphinx-guides/source/admin/solr-search-index.rst +++ b/doc/sphinx-guides/source/admin/solr-search-index.rst @@ -89,7 +89,7 @@ To re-index a dataset by its database ID: Clearing a Dataset from Solr ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This API will clear the Solr entry for the dataset specified. It can be useful if you have reasons to stop showing a published dataset in search results and/or on Collection pages, but don't want to destroy and purge it from the database just yet. +This API will clear the Solr entry for the dataset specified. It can be useful if you have reasons to want to hide a published dataset from showing in search results and/or on Collection pages, but don't want to destroy and purge it from the database just yet. ``curl -X DELETE http://localhost:8080/api/admin/index/datasets/`` From 462d8f743ba96beb39a2d30ec49eb0ee3ae9d210 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 11 Jan 2024 10:17:18 -0500 Subject: [PATCH 0531/1112] #10216 typo in release note --- doc/release-notes/10216-metadatablocks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10216-metadatablocks.md b/doc/release-notes/10216-metadatablocks.md index 8fbd4f37e14..b3be7e76abc 100644 --- a/doc/release-notes/10216-metadatablocks.md +++ b/doc/release-notes/10216-metadatablocks.md @@ -1,4 +1,4 @@ The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: -- `isRequired` - Wether or not this field is required +- `isRequired` - Whether or not this field is required - `displayOrder`: The display order of the field in create/edit forms From b1bb6a047cc347a6d6c97ba9f56060d3805ec545 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:35:34 -0500 Subject: [PATCH 0532/1112] minor doc tweaks #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index b5d420467aa..96475f33230 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -72,7 +72,7 @@ The response includes the id for the Globus endpoint to use along with several s The getDatasetMetadata and getFileListing URLs are just signed versions of the standard Dataset metadata and file listing API calls. The other two are Globus specific. -If called for a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a +If called for, a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a the "managed" parameter will be false, the "endpoint" parameter is replaced with a JSON array of "referenceEndpointsWithPaths" and the requestGlobusTransferPaths and addGlobusFiles URLs are replaced with ones for requestGlobusReferencePaths and addFiles. All of these calls are described further below.
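As a side note on the admin Index API shown in the release note and curl example above: the same DELETE call could be scripted from Java with the JDK HTTP client. This is only an illustrative sketch; the base URL and the dataset database ID below are placeholders.

```java
// Illustrative sketch: issue the DELETE documented above against the admin
// Index API to clear a single dataset from Solr. Base URL and dataset ID are
// placeholder assumptions; only the endpoint path comes from the guide above.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ClearDatasetFromSolr {
    public static void main(String[] args) throws Exception {
        String baseUrl = "http://localhost:8080"; // placeholder
        long datasetDatabaseId = 42L;             // placeholder database ID
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(baseUrl + "/api/admin/index/datasets/" + datasetDatabaseId))
                .DELETE()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + ": " + response.body());
    }
}
```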
@@ -91,7 +91,7 @@ The returned response includes the same getDatasetMetadata and getFileListing UR Performing an Upload/Transfer In -------------------------------- -The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer or to reference files (based on the "managed" parameter). +The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer (managed=true) or to reference files (managed=false). Once the user identifies which files are to be added, the requestGlobusTransferPaths or requestGlobusReferencePaths URLs can be called. These both reference the same API call but must be used with different entries in the JSON body sent: From 1c3162f01cb921b21a72042ea03b1e9ca94c6da9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:49:01 -0500 Subject: [PATCH 0533/1112] typo #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 96475f33230..57748d0afc9 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -170,7 +170,7 @@ In the managed case, once a Globus transfer has been initiated a final API call curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA" -Note that the mimetype is multipart/form-data, matching the /addFiles API call. ALso note that the API_TOKEN is not needed when using a signed URL. +Note that the mimetype is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL. With this information, Dataverse will begin to monitor the transfer and when it completes, will add all files for which the transfer succeeded. As the transfer can take significant time and the API call is asynchronous, the only way to determine if the transfer succeeded via API is to use the standard calls to check the dataset lock state and contents. From 8cc2e7c0e5ba16b2f380f8fd31531e1f90271c12 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:56:50 -0500 Subject: [PATCH 0534/1112] fix path in globus endpoint docs #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 57748d0afc9..a9cfe5aedff 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -102,7 +102,7 @@ Once the user identifies which files are to be added, the requestGlobusTransferP export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV export LOCALE=en-US - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUpload" + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUploadPaths" Note that when using the dataverse-globus app or the return from the previous call, the URL for this call will be signed and no API_TOKEN is needed. 
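The corrected path above, /api/datasets/:persistentId/requestGlobusUploadPaths, is what a client calls to request upload paths in the managed Globus case, and a later patch in this series adds a UtilIT.requestGlobusUploadPaths helper for it. The sketch below shows roughly what such a REST Assured call looks like; the JSON body fields are placeholder assumptions rather than the documented payload.

```java
// Sketch (not the project's actual helper): request Globus upload paths for a
// dataset via the endpoint referenced above, in the REST Assured style the API
// tests in this series use. Body fields and token are placeholder assumptions.
import static io.restassured.RestAssured.given;

import io.restassured.response.Response;
import jakarta.json.Json;
import jakarta.json.JsonObject;

public class GlobusUploadPathsSketch {

    public static Response requestGlobusUploadPaths(int datasetId, JsonObject body, String apiToken) {
        return given()
                .header("X-Dataverse-key", apiToken)
                .contentType("application/json")
                .body(body.toString())
                .post("/api/datasets/" + datasetId + "/requestGlobusUploadPaths");
    }

    public static void main(String[] args) {
        JsonObject body = Json.createObjectBuilder()
                .add("principal", "globus-user-id-placeholder") // assumption
                .add("numberOfFiles", 2)                        // assumption
                .build();
        Response response = requestGlobusUploadPaths(1, body, "xxxx-api-token");
        System.out.println(response.getStatusCode());
    }
}
```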
From c3556e012a03b1e131146821faabb183b1a62a87 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 12:14:24 -0500 Subject: [PATCH 0535/1112] add missing trailing double quote #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index a9cfe5aedff..5a90243bd93 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -168,7 +168,7 @@ In the managed case, once a Globus transfer has been initiated a final API call "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \ {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}' - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA" + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles" -F "jsonData=$JSON_DATA" Note that the mimetype is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL. From 50425d3f6e063b7f54d5a49b7bcb758f0ffde3b6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 Jan 2024 14:20:03 -0500 Subject: [PATCH 0536/1112] only list the OAI sets that have associated records #3322 --- .../harvest/server/OAISetServiceBean.java | 20 +++++++++++++++++++ .../xoai/DataverseXoaiSetRepository.java | 4 ++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java index 2bd666401c7..d5c78c36b98 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java @@ -25,6 +25,7 @@ import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; @@ -121,6 +122,25 @@ public List findAllNamedSets() { } } + /** + * "Active" sets are the ones that have been successfully exported, and contain + * a non-zero number of records. (Although a set that contains a number of + * records that are all marked as "deleted" is still an active set!) 
+ * @return list of OAISets + */ + public List findAllActiveNamedSets() { + String jpaQueryString = "select object(o) " + + "from OAISet as o, OAIRecord as r " + + "where r.setName = o.spec " + + "and o.spec != '' " + + "group by o order by o.spec"; + + Query query = em.createQuery(jpaQueryString); + List queryResults = query.getResultList(); + + return queryResults; + } + @Asynchronous public void remove(Long setId) { OAISet oaiSet = find(setId); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiSetRepository.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiSetRepository.java index b4e275b6059..1e713b08adb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiSetRepository.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiSetRepository.java @@ -35,7 +35,7 @@ public void setSetService(OAISetServiceBean setService) { @Override public boolean supportSets() { - List dataverseOAISets = setService.findAllNamedSets(); + List dataverseOAISets = setService.findAllActiveNamedSets(); if (dataverseOAISets == null || dataverseOAISets.isEmpty()) { return false; @@ -46,7 +46,7 @@ public boolean supportSets() { @Override public List getSets() { logger.fine("calling retrieveSets()"); - List dataverseOAISets = setService.findAllNamedSets(); + List dataverseOAISets = setService.findAllActiveNamedSets(); List XOAISets = new ArrayList(); if (dataverseOAISets != null) { From 15ad04ee96164806036a974dbe5bf41ea2a7f0fa Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 Jan 2024 14:52:24 -0500 Subject: [PATCH 0537/1112] A test for the new "don't list until exported" OAI set feature (#3322) --- .../iq/dataverse/api/HarvestingServerIT.java | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index e02964ef28f..e0f121305e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -288,7 +288,7 @@ public void testNativeSetAPI() { } @Test - public void testSetEditAPIandOAIlistSets() { + public void testSetEditAPIandOAIlistSets() throws InterruptedException { // This test focuses on testing the Edit functionality of the Dataverse // OAI Set API and the ListSets method of the Dataverse OAI server. @@ -299,7 +299,8 @@ public void testSetEditAPIandOAIlistSets() { // expected HTTP result codes. String setName = UtilIT.getRandomString(6); - String setDef = "*"; + String persistentId = extraDatasetsIdentifiers.get(0); + String setDef = "dsPersistentId:"+persistentId; // Make sure the set does not exist String setPath = String.format("/api/harvest/server/oaisets/%s", setName); @@ -369,16 +370,35 @@ public void testSetEditAPIandOAIlistSets() { XmlPath responseXmlPath = validateOaiVerbResponse(listSetsResponse, "ListSets"); - // 2. Validate the payload of the response, by confirming that the set + // 2. The set hasn't been exported yet, so it shouldn't be listed in + // ListSets (#3322). Let's confirm that: + + List listSets = responseXmlPath.getList("OAI-PMH.ListSets.set.list().findAll{it.setName=='"+setName+"'}", Node.class); + // 2a. 
Confirm that our set is listed: + assertNotNull(listSets, "Unexpected response from ListSets"); + assertEquals(0, listSets.size(), "An unexported OAI set is listed in ListSets"); + + // export the set: + + Response exportSetResponse = UtilIT.exportOaiSet(setName); + assertEquals(200, exportSetResponse.getStatusCode()); + Thread.sleep(1000L); // sleep for a sec to be sure + + // ... try again: + + listSetsResponse = UtilIT.getOaiListSets(); + responseXmlPath = validateOaiVerbResponse(listSetsResponse, "ListSets"); + + // 3. Validate the payload of the response, by confirming that the set // we created and modified, above, is being listed by the OAI server // and its xml record is properly formatted - List listSets = responseXmlPath.getList("OAI-PMH.ListSets.set.list().findAll{it.setName=='"+setName+"'}", Node.class); + listSets = responseXmlPath.getList("OAI-PMH.ListSets.set.list().findAll{it.setName=='"+setName+"'}", Node.class); - // 2a. Confirm that our set is listed: + // 3a. Confirm that our set is listed: assertNotNull(listSets, "Unexpected response from ListSets"); assertEquals(1, listSets.size(), "Newly-created set isn't properly listed by the OAI server"); - // 2b. Confirm that the set entry contains the updated description: + // 3b. Confirm that the set entry contains the updated description: assertEquals(newDescription, listSets.get(0).getPath("setDescription.metadata.element.field", String.class), "Incorrect description in the ListSets entry"); // ok, the xml record looks good! From 3a81926980edc7c8228dddf18a8f1305b32fc2c8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 15:40:14 -0500 Subject: [PATCH 0538/1112] add requestGlobusUploadPaths to UtilIT #10200 --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e29677c2252..33dda05b4d7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3718,4 +3718,12 @@ static Response requestGlobusDownload(Integer datasetId, JsonObject body, String .post("/api/datasets/" + datasetId + "/requestGlobusDownload"); } + static Response requestGlobusUploadPaths(Integer datasetId, JsonObject body, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(body.toString()) + .contentType("application/json") + .post("/api/datasets/" + datasetId + "/requestGlobusUploadPaths"); + } + } From 83120012480ce12ef8db3d33d3a1c93c4605945a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 15:47:17 -0500 Subject: [PATCH 0539/1112] clarify where taskIdentifier comes from #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 5a90243bd93..834db8161f0 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -157,7 +157,7 @@ In the remote/reference case, the map is from the initially supplied endpoint/pa Adding Files to the Dataset --------------------------- -In the managed case, once a Globus transfer has been initiated a final API call is made to Dataverse to provide it with the task identifier of the transfer and information about the files being transferred: +In the managed case, you must initiate 
a Globus transfer and take note of its task identifier. As in the JSON example below, you will pass it as ``taskIdentifier`` along with details about the files you are transferring: .. code-block:: bash From 2f571e23c7b1b98ce530d5a87ed20c8797810175 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 Jan 2024 16:38:18 -0500 Subject: [PATCH 0540/1112] Got rid of some unnecessary database lookups that were made when rendering the harvesting server page. #3322 --- .../iq/dataverse/HarvestingSetsPage.java | 60 +++++++++++++++++-- src/main/java/propertyFiles/Bundle.properties | 2 +- 2 files changed, 56 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java index 6dbba34920b..0b66b652e0c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java @@ -30,6 +30,8 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; +import java.util.HashMap; +import java.util.Map; import org.apache.commons.lang3.StringUtils; /** @@ -430,44 +432,92 @@ public boolean isSessionUserAuthenticated() { return false; } + // The numbers of datasets and deleted/exported records below are used + // in rendering rules on the page. They absolutely need to be cached + // on the first lookup. + + Map cachedSetInfoNumDatasets = new HashMap<>(); + public int getSetInfoNumOfDatasets(OAISet oaiSet) { if (oaiSet.isDefaultSet()) { return getSetInfoNumOfExported(oaiSet); } + if (cachedSetInfoNumDatasets.get(oaiSet.getSpec()) != null) { + return cachedSetInfoNumDatasets.get(oaiSet.getSpec()); + } + String query = oaiSet.getDefinition(); try { int num = oaiSetService.validateDefinitionQuery(query); if (num > -1) { + cachedSetInfoNumDatasets.put(oaiSet.getSpec(), num); return num; } } catch (OaiSetException ose) { - // do notghin - will return zero. + // do nothing - will return zero. 
} + cachedSetInfoNumDatasets.put(oaiSet.getSpec(), 0); return 0; } + Map cachedSetInfoNumExported = new HashMap<>(); + Integer defaultSetNumExported = null; + public int getSetInfoNumOfExported(OAISet oaiSet) { + if (oaiSet.isDefaultSet() && defaultSetNumExported != null) { + return defaultSetNumExported; + } else if (cachedSetInfoNumExported.get(oaiSet.getSpec()) != null) { + return cachedSetInfoNumExported.get(oaiSet.getSpec()); + } + List records = oaiRecordService.findActiveOaiRecordsBySetName(oaiSet.getSpec()); + int num; + if (records == null || records.isEmpty()) { - return 0; + num = 0; + } else { + num = records.size(); } - return records.size(); + if (oaiSet.isDefaultSet()) { + defaultSetNumExported = num; + } else { + cachedSetInfoNumExported.put(oaiSet.getSpec(), num); + } + return num; } + Map cachedSetInfoNumDeleted = new HashMap<>(); + Integer defaultSetNumDeleted = null; + public int getSetInfoNumOfDeleted(OAISet oaiSet) { + if (oaiSet.isDefaultSet() && defaultSetNumDeleted != null) { + return defaultSetNumDeleted; + } else if (cachedSetInfoNumDeleted.get(oaiSet.getSpec()) != null) { + return cachedSetInfoNumDeleted.get(oaiSet.getSpec()); + } + List records = oaiRecordService.findDeletedOaiRecordsBySetName(oaiSet.getSpec()); + int num; + if (records == null || records.isEmpty()) { - return 0; + num = 0; + } else { + num = records.size(); } - return records.size(); + if (oaiSet.isDefaultSet()) { + defaultSetNumDeleted = num; + } else { + cachedSetInfoNumDeleted.put(oaiSet.getSpec(), num); + } + return num; } public void validateSetQuery() { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index ece3f070cdd..157f2ecaf54 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -631,7 +631,7 @@ harvestserver.tab.header.description=Description harvestserver.tab.header.definition=Definition Query harvestserver.tab.col.definition.default=All Published Local Datasets harvestserver.tab.header.stats=Datasets -harvestserver.tab.col.stats.empty=No records (empty set) +harvestserver.tab.col.stats.empty=No active records ({2} {2, choice, 0#records|1#record|2#records} marked as deleted) harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted) harvestserver.tab.header.action=Actions harvestserver.tab.header.action.btn.export=Run Export From d86ab1587cb5088330c2df6565744769cc859119 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Fri, 12 Jan 2024 11:36:30 +0100 Subject: [PATCH 0541/1112] test: use curator role in testListRoleAssignments --- scripts/api/data/role-contributor-plus.json | 12 ---------- .../harvard/iq/dataverse/api/DatasetsIT.java | 22 ++++--------------- 2 files changed, 4 insertions(+), 30 deletions(-) delete mode 100644 scripts/api/data/role-contributor-plus.json diff --git a/scripts/api/data/role-contributor-plus.json b/scripts/api/data/role-contributor-plus.json deleted file mode 100644 index ef9ba3aaff6..00000000000 --- a/scripts/api/data/role-contributor-plus.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "alias":"contributorPlus", - "name":"ContributorPlus", - "description":"For datasets, a person who can edit License + Terms, then submit them for review, and add collaborators.", - "permissions":[ - "ViewUnpublishedDataset", - "EditDataset", - "DownloadFile", - "DeleteDatasetDraft", - "ManageDatasetPermissions" - ] -} diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b51d400d2d4..787b9b018a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1349,17 +1349,11 @@ public void testListRoleAssignments() { Response notPermittedToListRoleAssignmentOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataset.getStatusCode()); - // We create a new role that includes "ManageDatasetPermissions" which are required for listing role assignments - // of a dataset and assign it to the contributor user + // We assign the curator role to the contributor user + // (includes "ManageDatasetPermissions" which are required for listing role assignments of a dataset, but not + // "ManageDataversePermissions") - String pathToJsonFile = "scripts/api/data/role-contributor-plus.json"; - Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, adminApiToken); - addDataverseRoleResponse.prettyPrint(); - String body = addDataverseRoleResponse.getBody().asString(); - String status = JsonPath.from(body).getString("status"); - assertEquals("OK", status); - - Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "contributorPlus", "@" + contributorUsername, adminApiToken); + Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "curator", "@" + contributorUsername, adminApiToken); giveRandoPermission.prettyPrint(); assertEquals(200, giveRandoPermission.getStatusCode()); @@ -1373,14 +1367,6 @@ public void testListRoleAssignments() { notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); - - // Finally, we clean up and delete the role we created - - Response deleteDataverseRoleResponse = UtilIT.deleteDataverseRole("contributorPlus", adminApiToken); - deleteDataverseRoleResponse.prettyPrint(); - body = deleteDataverseRoleResponse.getBody().asString(); - status = JsonPath.from(body).getString("status"); - assertEquals("OK", status); } @Test From 5e9cc2ff4764915324ffc3c990f02e09738101c0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 12 Jan 2024 13:57:59 -0500 Subject: [PATCH 0542/1112] fix bad SQL query in guestbook #10232 --- .../edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index b0cc41eb448..01e6ecf7ff2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -928,7 +928,7 @@ public Long getDownloadCountByDatasetId(Long datasetId, LocalDate date) { if(date != null) { query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId + " and responsetime < '" + date.toString() + "' and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); }else { - query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId+ "and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); + 
query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId+ " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); } return (Long) query.getSingleResult(); } From d3f3eb9219fa101db8ebfea34ee62ccd3111194a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 12 Jan 2024 14:18:25 -0500 Subject: [PATCH 0543/1112] Update docker-compose-dev.yml better explain presence of settings #9275 --- docker-compose-dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index ce9f39a418a..10fe62ff6df 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -19,7 +19,7 @@ services: DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test DATAVERSE_JSF_REFRESH_PERIOD: "1" - # to get HarvestingServerIT to pass + # These two oai settings are here to get HarvestingServerIT to pass dataverse_oai_server_maxidentifiers: "2" dataverse_oai_server_maxrecords: "2" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 From 74b45e1d7d24b621a7368c517e687df0b21f199c Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 16 Jan 2024 10:21:42 -0500 Subject: [PATCH 0544/1112] QA Guide general update --- doc/sphinx-guides/source/qa/index.md | 6 +-- doc/sphinx-guides/source/qa/overview.md | 22 ++++++---- .../source/qa/performance-tests.md | 8 ++++ .../{other-approaches.md => qa-workflow.md} | 41 ++++--------------- ...{manual-testing.md => testing-approach.md} | 9 +++- 5 files changed, 42 insertions(+), 44 deletions(-) rename doc/sphinx-guides/source/qa/{other-approaches.md => qa-workflow.md} (58%) rename doc/sphinx-guides/source/qa/{manual-testing.md => testing-approach.md} (84%) diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md index 6027f07574f..c7582a2169f 100644 --- a/doc/sphinx-guides/source/qa/index.md +++ b/doc/sphinx-guides/source/qa/index.md @@ -3,9 +3,9 @@ ```{toctree} overview.md testing-infrastructure.md -performance-tests.md -manual-testing.md +qa-workflow.md +testing-approach.md test-automation.md -other-approaches.md jenkins.md +performance-tests.md ``` diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index c4f66446ca3..08740e9345d 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -11,19 +11,27 @@ This guide describes the testing process used by QA at IQSS and provides a refer ## Workflow -The basic workflow is as follows. Bugs or feature requests are submitted to GitHub by the community or by team members as issues. These issues are prioritized and added to a two-week sprint that is reflected on the GitHub {ref}`kanban-board`. As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common {ref}`develop branch ` and ultimately released as part of the product. Before a pull request is moved to QA, it must be reviewed by a member of the development team from a coding perspective, and it must pass automated tests. There it is tested manually, exercising the UI (using three common browsers) and any business logic it implements. Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. 
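Regarding the one-character guestbook query fix earlier in this series: without the added space, the concatenated dataset ID runs directly into the and keyword. A small stand-alone illustration of the difference follows; it only builds strings, and the ACCESS_REQUEST value is a stand-in.

```java
// Demonstrates why the missing space mattered in the guestbook query fix above:
// without it the dataset ID and the "and" keyword fuse together, producing a
// query string that is not what was intended.
public class GuestbookQuerySketch {
    public static void main(String[] args) {
        long datasetId = 42L;
        String accessRequest = "AccessRequest"; // stand-in for GuestbookResponse.ACCESS_REQUEST

        String broken = "select count(o.id) from GuestbookResponse o where o.dataset_id = "
                + datasetId + "and eventtype != '" + accessRequest + "'";
        String fixed = "select count(o.id) from GuestbookResponse o where o.dataset_id = "
                + datasetId + " and eventtype != '" + accessRequest + "'";

        // ... o.dataset_id = 42and eventtype ...  (ID fused with the keyword)
        System.out.println(broken);
        // ... o.dataset_id = 42 and eventtype ... (intended query)
        System.out.println(fixed);
    }
}
```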
Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Done column and the process repeats with the next PR until it is decided to {doc}`make a release `. +The basic workflow is as follows. Bugs or feature requests are submitted to GitHub by the community or by team members as [issues](https://github.com/IQSS/dataverse/issues). These issues are prioritized and added to a two-week sprint that is reflected on the GitHub {ref}`kanban-board`. As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common {ref}`develop branch ` and ultimately released as part of the product. -## Release Cadence and Sprints +Before a pull request is moved to QA, it must be reviewed by a member of the development team from a coding perspective, and it must pass automated tests. There it is tested manually, exercising the UI (using three common browsers) and any business logic it implements. -A release likely spans multiple two-week sprints. Each sprint represents the priorities for that time and is sized so that the team can reasonably complete most of the work on time. This is a goal to help with planning, it is not a strict requirement. Some issues from the previous sprint may remain and likely be included in the next sprint but occasionally may be deprioritized and deferred to another time. +Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Done column and the process repeats with the next PR until it is decided to {doc}`make a release `. -The decision to make a release can be based on the time since the last release, some important feature needed by the community or contractual deadline, or some other logical reason to package the work completed into a named release and posted to the releases section on GitHub. +## Tips and Tricks -## Performance Testing and Deployment +- Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (on Google Drive). This potentially will help with future testing. +- When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. +- Always tail the server.log file while testing. 
Open a terminal window to the test instance and `tail -F server.log`. This helps you get a real-time sense of what the server is doing when you act and makes it easier to identify any stack trace on failure. +- When overloaded, do the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. +- When testing a bug fix, try reproducing the bug on the demo before testing the fix, that way you know you are taking the correct steps to verify that the fix worked. +- When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. +- Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. -The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named {ref}`Locust `, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds, it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product, and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. +## Release Cadence and Sprints -Once the performance has been tested and recorded in a [Google spreadsheet](https://docs.google.com/spreadsheets/d/1lwPlifvgu3-X_6xLwq6Zr6sCOervr1mV_InHIWjh5KA/edit?usp=sharing) for this proposed version, the release will be prepared and posted. +A release likely spans multiple two-week sprints. Each sprint represents the priorities for that time and is sized so that the team can reasonably complete most of the work on time. This is a goal to help with planning, it is not a strict requirement. Some issues from the previous sprint may remain and likely be included in the next sprint but occasionally may be deprioritized and deferred to another time. + +The decision to make a release can be based on the time since the last release, some important feature needed by the community or contractual deadline, or some other logical reason to package the work completed into a named release and posted to the releases section on GitHub. 
## Making a Release diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index ad7972bd75e..3fab0386eb0 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -7,8 +7,16 @@ ## Introduction +The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named {ref}`Locust `, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. + +Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds, it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product, and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. + +## Testing Environment + To run performance tests, we have a performance test cluster on AWS that employs web, database, and Solr. The database contains a copy of production that is updated weekly on Sundays. To ensure the homepage content is consistent between test runs across releases, two scripts set the datasets that will appear on the homepage. There is a script on the web server in the default CentOS user dir and one on the database server in the default CentOS user dir. Run these scripts before conducting the tests. +Once the performance has been tested and recorded in a [Google spreadsheet](https://docs.google.com/spreadsheets/d/1lwPlifvgu3-X_6xLwq6Zr6sCOervr1mV_InHIWjh5KA/edit?usp=sharing) for this proposed version, the release will be prepared and posted. + ## Access Access to performance cluster instances requires ssh keys. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. Access to the demo requires an ssh key, see Leonid. diff --git a/doc/sphinx-guides/source/qa/other-approaches.md b/doc/sphinx-guides/source/qa/qa-workflow.md similarity index 58% rename from doc/sphinx-guides/source/qa/other-approaches.md rename to doc/sphinx-guides/source/qa/qa-workflow.md index 2e2ef906191..78dcd1b6322 100644 --- a/doc/sphinx-guides/source/qa/other-approaches.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -1,24 +1,10 @@ -# Other Approaches to Deploying and Testing +# QA workflow for Pull Requests ```{contents} Contents: :local: :depth: 3 ``` -This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. If a developer is testing, they would likely just deploy to their dev environment. That might be ok, but is the env is fully configured enough to offer a real-world testing scenario? An alternative might be to spin an EC2 branch on AWS, potentially using sample data. 
This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. - -## Tips and Tricks - -- Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (on Google Drive). This potentially will help with future testing. -- When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. -- Always tail the server.log file while testing. Open a terminal window to the test instance and `tail -F server.log`. This helps you get a real-time sense of what the server is doing when you act and makes it easier to identify any stack trace on failure. -- When overloaded, do the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. -- When testing a bug fix, try reproducing the bug on the demo before testing the fix, that way you know you are taking the correct steps to verify that the fix worked. -- When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. -- Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. - -## Workflow for Completing QA on a PR - 1. Assign the PR you are working on to yourself. 1. What does it do? @@ -98,24 +84,13 @@ This workflow is fine for a single person testing a PR, one at a time. It would 1. Merge PR - Click merge to include this PR into the common develop branch. + Click the "Merge pull request" button and be sure to use the "Create a merge commit" option to include this PR into the common develop branch. + + Some of the reasons why we encourage using option over Rebase or Squash are: + -Preserving commit hitory + -Clearer context and treaceability + -Easier collaboration, bug tracking and reverting 1. Delete merged branch - Just a housekeeping move if the PR is from IQSS. Click the delete branch button where the merge button had been. There is no deletion for outside contributions. - - -## Checklist for Completing QA on a PR - -1. Build the docs -1. Smoke test the pr -1. Test the new functionality -1. Regression test -1. Test any upgrade instructions - -## Checklist for QA on Release - -1. Review Consolidated Release Notes, in particular upgrade instructions. -1. Conduct performance testing and compare with the previous release. -1. Perform clean install and smoke test. -1. Potentially follow upgrade instructions. Though they have been performed incrementally for each PR, the sequence may need checking + Just a housekeeping move if the PR is from IQSS. 
Click the delete branch button where the merge button had been. There is no deletion for outside contributions. \ No newline at end of file diff --git a/doc/sphinx-guides/source/qa/manual-testing.md b/doc/sphinx-guides/source/qa/testing-approach.md similarity index 84% rename from doc/sphinx-guides/source/qa/manual-testing.md rename to doc/sphinx-guides/source/qa/testing-approach.md index 580e5153394..21039c10b1f 100644 --- a/doc/sphinx-guides/source/qa/manual-testing.md +++ b/doc/sphinx-guides/source/qa/testing-approach.md @@ -1,4 +1,4 @@ -# Manual Testing Approach +# Testing Approach ```{contents} Contents: :local: @@ -41,3 +41,10 @@ Think about risk. Is the feature or function part of a critical area such as per 1. Upload 3 different types of files: You can use a tabular file, 50by1000.dta, an image file, and a text file. 1. Publish the dataset. 1. Download a file. + + +## Alternative deployment and testing + +This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. If a developer is testing, they would likely just deploy to their dev environment. That might be ok, but is the env is fully configured enough to offer a real-world testing scenario? + +An alternative might be to spin an EC2 branch on AWS, potentially using sample data. This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. \ No newline at end of file From ff044632aff9c2b98aea01da934cfbf63476dc40 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 16 Jan 2024 11:32:17 -0500 Subject: [PATCH 0545/1112] add release note #9926 --- doc/release-notes/9926-list-role-assignments-permissions.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9926-list-role-assignments-permissions.md diff --git a/doc/release-notes/9926-list-role-assignments-permissions.md b/doc/release-notes/9926-list-role-assignments-permissions.md new file mode 100644 index 00000000000..43cd83dc5c9 --- /dev/null +++ b/doc/release-notes/9926-list-role-assignments-permissions.md @@ -0,0 +1 @@ +Listing collction/dataverse role assignments via API still requires ManageDataversePermissions, but listing dataset role assignments via API now requires only ManageDatasetPermissions. 
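The release note just above summarizes the permission change exercised in the DatasetsIT diff earlier in this series. A rough sketch of that flow is shown below, reusing the UtilIT helper signatures that appear verbatim in those diffs; the tokens, IDs, and alias are placeholders, and this is an illustration rather than an actual test from the suite.

```java
// Sketch of the permission behavior described in the release note above, using
// the UtilIT helpers shown in the DatasetsIT diff. All identifiers and tokens
// are placeholders; a real run needs an existing dataset and valid API tokens.
import static jakarta.ws.rs.core.Response.Status.OK;
import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
import static org.junit.jupiter.api.Assertions.assertEquals;

import edu.harvard.iq.dataverse.api.UtilIT;
import io.restassured.response.Response;
import org.junit.jupiter.api.Test;

public class RoleAssignmentListingSketch {

    @Test
    public void curatorCanListDatasetButNotDataverseAssignments() {
        String adminApiToken = "admin-token-placeholder";
        String curatorApiToken = "curator-token-placeholder";
        String datasetPersistentId = "doi:10.5072/FK2/EXAMPLE"; // placeholder
        String datasetId = "42";                                // placeholder
        String dataverseAlias = "exampleCollection";            // placeholder

        // Grant the curator role on the dataset (includes ManageDatasetPermissions).
        Response grant = UtilIT.grantRoleOnDataset(datasetPersistentId, "curator",
                "@curatorUser", adminApiToken);
        assertEquals(OK.getStatusCode(), grant.getStatusCode());

        // Listing dataset role assignments now succeeds for the curator...
        Response onDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId, null, curatorApiToken);
        assertEquals(OK.getStatusCode(), onDataset.getStatusCode());

        // ...but listing collection (dataverse) role assignments still requires
        // ManageDataversePermissions and is rejected.
        Response onDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, curatorApiToken);
        assertEquals(UNAUTHORIZED.getStatusCode(), onDataverse.getStatusCode());
    }
}
```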
From 30e357bcfba66a2c7c2044beb4f03d88e532b96a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 16 Jan 2024 12:37:10 -0500 Subject: [PATCH 0546/1112] expect noSetHierarchy rather than noRecordsMatch #9275 --- .../java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 45dd0c08226..ac28e7a3605 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -888,7 +888,7 @@ public void testNoSuchSetError() { noSuchSet.prettyPrint(); noSuchSet.then().assertThat() .statusCode(OK.getStatusCode()) - .body("oai.error.@code", equalTo("noRecordsMatch")) + .body("oai.error.@code", equalTo("noSetHierarchy")) .body("oai.error", equalTo("Requested set 'census' does not exist")); } From dc08219cc6f7a2b1152c0acfe67b26844daa5abe Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 16 Jan 2024 12:46:32 -0500 Subject: [PATCH 0547/1112] Changes after talking to Phil at 12:00 on Jan 16 --- doc/sphinx-guides/source/qa/index.md | 1 - doc/sphinx-guides/source/qa/jenkins.md | 59 ------------------- doc/sphinx-guides/source/qa/overview.md | 8 ++- doc/sphinx-guides/source/qa/qa-workflow.md | 5 +- .../source/qa/test-automation.md | 58 ++++++++++++++++-- .../source/qa/testing-approach.md | 2 +- 6 files changed, 65 insertions(+), 68 deletions(-) delete mode 100644 doc/sphinx-guides/source/qa/jenkins.md diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md index c7582a2169f..937b352bccb 100644 --- a/doc/sphinx-guides/source/qa/index.md +++ b/doc/sphinx-guides/source/qa/index.md @@ -6,6 +6,5 @@ testing-infrastructure.md qa-workflow.md testing-approach.md test-automation.md -jenkins.md performance-tests.md ``` diff --git a/doc/sphinx-guides/source/qa/jenkins.md b/doc/sphinx-guides/source/qa/jenkins.md deleted file mode 100644 index 9259284beb9..00000000000 --- a/doc/sphinx-guides/source/qa/jenkins.md +++ /dev/null @@ -1,59 +0,0 @@ -# Jenkins - -```{contents} Contents: -:local: -:depth: 3 -``` - -## Introduction - -Jenkins is our primary tool for knowing if our API tests are passing. (Unit tests are executed locally by developers.) - -You can find our Jenkins installation at . - -Please note that while it has been open to the public in the past, it is currently firewalled off. We can poke a hole in the firewall for your IP address if necessary. Please get in touch. (You might also be interested in which is about restoring the ability of contributors to see if their pull requests are passing API tests or not.) - -## Jobs - -Jenkins is organized into jobs. We'll highlight a few. - -### IQSS-dataverse-develop - -, which we will refer to as the "develop" job runs after pull requests are merged. It is crucial that this job stays green (passing) because we always want to stay in a "release ready" state. If you notice that this job is failing, make noise about it! - -You can get to this job from the README at . - -### IQSS-Dataverse-Develop-PR - - can be thought of as "PR jobs". It's a collection of jobs run on pull requests. Typically, you will navigate directly into the job (and it's particular build number) from a pull request. For example, from , look for a check called "continuous-integration/jenkins/pr-merge". 
Clicking it will bring you to a particular build like (build #10). - -### guides.dataverse.org - - is what we use to build guides. See {doc}`/developers/making-releases` in the Developer Guide. - -## Checking if API Tests are Passing - -If API tests are failing, you should not merge the pull request. - -How can you know if API tests are passing? Here are the steps, by way of example. - -- From the pull request, navigate to the build. For example from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). -- You are now on the new "blue" interface for Jenkins. Click the button with an arrow on the right side of the header called "go to classic" which should take you to (for example) . -- Click "Test Result". -- Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. -- Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. - -## Diagnosing Failures - -API test failures can have multiple causes. As described above, from the "Test Result" page, you might see the failure under "All Failed Tests". However, the test could have failed because of some underlying system issue. - -If you have determined that the API tests have not run at all, your next step should be to click on "Console Output". For example, . Click "Full log" to see the full log in the browser or navigate to (for example) to get a plain text version. - -Go to the end of the log and then scroll up, looking for the failure. A failed Ansible task can look like this: - -``` -TASK [dataverse : download payara zip] ***************************************** -fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "elapsed": 10, "msg": "Request failed: ", "url": "https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip"} -``` - -In the example above, if Payara can't be downloaded, we're obviously going to have problems deploying Dataverse to it! diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index 08740e9345d..01ab629db8c 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -33,6 +33,12 @@ A release likely spans multiple two-week sprints. Each sprint represents the pri The decision to make a release can be based on the time since the last release, some important feature needed by the community or contractual deadline, or some other logical reason to package the work completed into a named release and posted to the releases section on GitHub. +## Test API + +The API test suite is added to and maintained by development. (See {doc}`/developers/testing` in the Developer Guide.) It is generally advisable for code contributors to add API tests when adding new functionality. The approach here is one of code coverage: exercise as much of the code base's code paths as possible, every time to catch bugs. + +This type of approach is often used to give contributing developers confidence that their code didn’t introduce any obvious, major issues and is run on each commit. Since it is a broad set of tests, it is not clear whether any specific, conceivable test is run but it does add a lot of confidence that the code base is functioning due to its reach and consistency. 
(See {doc}`/qa/test-automation` in the Developer Guide.) + ## Making a Release -See {doc}`/developers/making-releases` in the Developer Guide. +See {doc}`/developers/making-releases` in the Developer Guide. \ No newline at end of file diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index 78dcd1b6322..df274d2405d 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -1,4 +1,4 @@ -# QA workflow for Pull Requests +# QA Workflow for Pull Requests ```{contents} Contents: :local: @@ -87,7 +87,8 @@ Click the "Merge pull request" button and be sure to use the "Create a merge commit" option to include this PR into the common develop branch. Some of the reasons why we encourage using option over Rebase or Squash are: - -Preserving commit hitory + + -Preserving commit history -Clearer context and treaceability -Easier collaboration, bug tracking and reverting diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md index c2b649df498..c996b4cea8f 100644 --- a/doc/sphinx-guides/source/qa/test-automation.md +++ b/doc/sphinx-guides/source/qa/test-automation.md @@ -1,15 +1,36 @@ # Test Automation - ```{contents} Contents: :local: :depth: 3 ``` -The API test suite is added to and maintained by development. (See {doc}`/developers/testing` in the Developer Guide.) It is generally advisable for code contributors to add API tests when adding new functionality. The approach here is one of code coverage: exercise as much of the code base's code paths as possible, every time to catch bugs. +## Introduction + +Jenkins is our primary tool for knowing if our API tests are passing. (Unit tests are executed locally by developers.) + +You can find our Jenkins installation at . + +Please note that while it has been open to the public in the past, it is currently firewalled off. We can poke a hole in the firewall for your IP address if necessary. Please get in touch. (You might also be interested in which is about restoring the ability of contributors to see if their pull requests are passing API tests or not.) + +## Jobs + +Jenkins is organized into jobs. We'll highlight a few. + +### IQSS-dataverse-develop -This type of approach is often used to give contributing developers confidence that their code didn’t introduce any obvious, major issues and is run on each commit. Since it is a broad set of tests, it is not clear whether any specific, conceivable test is run but it does add a lot of confidence that the code base is functioning due to its reach and consistency. +, which we will refer to as the "develop" job runs after pull requests are merged. It is crucial that this job stays green (passing) because we always want to stay in a "release ready" state. If you notice that this job is failing, make noise about it! -## Building and Deploying a Pull Request from Jenkins to Dataverse-Internal +You can get to this job from the README at . + +### IQSS-Dataverse-Develop-PR + + can be thought of as "PR jobs". It's a collection of jobs run on pull requests. Typically, you will navigate directly into the job (and it's particular build number) from a pull request. For example, from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). + +### guides.dataverse.org + + is what we use to build guides. See {doc}`/developers/making-releases` in the Developer Guide. 
+ +### Building and Deploying a Pull Request from Jenkins to Dataverse-Internal 1. Log on to GitHub, go to projects, dataverse to see Kanban board, select a pull request to test from the QA queue. @@ -34,3 +55,32 @@ This type of approach is often used to give contributing developers confidence t 1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. 1. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to `tail -F server.log` in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. + + + +## Checking if API Tests are Passing + +If API tests are failing, you should not merge the pull request. + +How can you know if API tests are passing? Here are the steps, by way of example. + +- From the pull request, navigate to the build. For example from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). +- You are now on the new "blue" interface for Jenkins. Click the button with an arrow on the right side of the header called "go to classic" which should take you to (for example) . +- Click "Test Result". +- Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. +- Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. + +## Diagnosing Failures + +API test failures can have multiple causes. As described above, from the "Test Result" page, you might see the failure under "All Failed Tests". However, the test could have failed because of some underlying system issue. + +If you have determined that the API tests have not run at all, your next step should be to click on "Console Output". For example, . Click "Full log" to see the full log in the browser or navigate to (for example) to get a plain text version. + +Go to the end of the log and then scroll up, looking for the failure. A failed Ansible task can look like this: + +``` +TASK [dataverse : download payara zip] ***************************************** +fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "elapsed": 10, "msg": "Request failed: ", "url": "https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip"} +``` + +In the example above, if Payara can't be downloaded, we're obviously going to have problems deploying Dataverse to it! diff --git a/doc/sphinx-guides/source/qa/testing-approach.md b/doc/sphinx-guides/source/qa/testing-approach.md index 21039c10b1f..2c7241999a8 100644 --- a/doc/sphinx-guides/source/qa/testing-approach.md +++ b/doc/sphinx-guides/source/qa/testing-approach.md @@ -43,7 +43,7 @@ Think about risk. Is the feature or function part of a critical area such as per 1. Download a file. -## Alternative deployment and testing +## Alternative Deployment and Testing This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. If a developer is testing, they would likely just deploy to their dev environment. 
That might be ok, but is the env is fully configured enough to offer a real-world testing scenario? From 95cc8cbffb79f8f91ba2e9137c2b3106e4c1f6b5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 16 Jan 2024 14:57:15 -0500 Subject: [PATCH 0548/1112] remove assertion about census not existing (doesn't appear) #9275 --- .../java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index ac28e7a3605..60e4f623992 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -888,8 +888,7 @@ public void testNoSuchSetError() { noSuchSet.prettyPrint(); noSuchSet.then().assertThat() .statusCode(OK.getStatusCode()) - .body("oai.error.@code", equalTo("noSetHierarchy")) - .body("oai.error", equalTo("Requested set 'census' does not exist")); + .body("oai.error.@code", equalTo("noSetHierarchy")); } // TODO: From edd6fc861f899b7ddb07c51fb5d900dbd0096a6c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 16 Jan 2024 16:15:42 -0500 Subject: [PATCH 0549/1112] drop "no such set test" #9275 --- .../edu/harvard/iq/dataverse/api/HarvestingServerIT.java | 9 --------- 1 file changed, 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 60e4f623992..e77853d6495 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -882,15 +882,6 @@ public void testInvalidQueryParams() { } - @Test - public void testNoSuchSetError() { - Response noSuchSet = given().get("/oai?verb=ListIdentifiers&set=census&metadataPrefix=dc"); - noSuchSet.prettyPrint(); - noSuchSet.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("oai.error.@code", equalTo("noSetHierarchy")); - } - // TODO: // What else can we test? 
// Some ideas: From 2adbabb31e9206eb1518048a66f98e5853502707 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 17 Jan 2024 12:24:04 +0000 Subject: [PATCH 0550/1112] Added: typeClass field to DatasetFieldType payload --- doc/release-notes/10216-metadatablocks.md | 5 +++-- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 + .../java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/10216-metadatablocks.md b/doc/release-notes/10216-metadatablocks.md index b3be7e76abc..59d9c1640a5 100644 --- a/doc/release-notes/10216-metadatablocks.md +++ b/doc/release-notes/10216-metadatablocks.md @@ -1,4 +1,5 @@ The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: -- `isRequired` - Whether or not this field is required -- `displayOrder`: The display order of the field in create/edit forms +- `isRequired`: Whether or not this field is required +- `displayOrder`: The display order of the field in create/edit forms +- `typeClass`: The type class of this field ("controlledVocabulary", "compound", or "primitive") diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index a97ef9c12d1..2eaf6b64579 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -565,6 +565,7 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { fieldsBld.add("displayName", fld.getDisplayName()); fieldsBld.add("title", fld.getTitle()); fieldsBld.add("type", fld.getFieldType().toString()); + fieldsBld.add("typeClass", typeClassString(fld)); fieldsBld.add("watermark", fld.getWatermark()); fieldsBld.add("description", fld.getDescription()); fieldsBld.add("multiple", fld.isAllowMultiples()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index f1c3a9815f1..39152bccad8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -27,6 +27,7 @@ void testGetCitationBlock() { .statusCode(OK.getStatusCode()) .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")) .body("data.fields.title.displayOrder", CoreMatchers.is(0)) + .body("data.fields.title.typeClass", CoreMatchers.is("primitive")) .body("data.fields.title.isRequired", CoreMatchers.is(true)); } From ebe95fdb2d81321e9de2d9e3fd3c41aacb474447 Mon Sep 17 00:00:00 2001 From: Katie Mika Date: Wed, 17 Jan 2024 11:35:33 -0500 Subject: [PATCH 0551/1112] Update native-api.rst Added clarification to what is affected in Set Citation Data Field Type for a Dataset --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 09fc3c69693..dbe769e2fd1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1572,8 +1572,8 @@ The fully expanded example above (without environment variables) looks like this Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Sets the dataset citation date field type for a given dataset. ``:publicationDate`` is the default. 
-Note that the dataset citation date field type must be a date field. +Sets the dataset citation date field type for a given dataset. ``:publicationDate`` is the default. +Note that the dataset citation date field type must be a date field. This change applies to all versions of the dataset that have an entry for the new date field. It also applies to all file citations in the dataset. .. code-block:: bash From 598c40b8e5ccb2bb3db7a839e4549ac4d00ff8e1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 17 Jan 2024 16:10:03 -0500 Subject: [PATCH 0552/1112] replace project 2 with 34 #9157 --- CONTRIBUTING.md | 2 +- doc/sphinx-guides/source/admin/integrations.rst | 2 +- doc/sphinx-guides/source/developers/documentation.rst | 2 +- doc/sphinx-guides/source/developers/version-control.rst | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b2be8f531c4..44f8ae65135 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -56,7 +56,7 @@ If you are interested in working on the main Dataverse code, great! Before you s Please read http://guides.dataverse.org/en/latest/developers/version-control.html to understand how we use the "git flow" model of development and how we will encourage you to create a GitHub issue (if it doesn't exist already) to associate with your pull request. That page also includes tips on making a pull request. -After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/2 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request). +After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/34 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request). Thanks for your contribution! diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index db566106b49..cae44d42dbf 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -245,7 +245,7 @@ Future Integrations The `Dataverse Project Roadmap `_ is a good place to see integrations that the core Dataverse Project team is working on. -The `Community Dev `_ column of our project board is a good way to track integrations that are being worked on by the Dataverse Community but many are not listed and if you have an idea for an integration, please ask on the `dataverse-community `_ mailing list if someone is already working on it. +If you have an idea for an integration, please ask on the `dataverse-community `_ mailing list if someone is already working on it. Many integrations take the form of "external tools". See the :doc:`external-tools` section for details. External tool makers should check out the :doc:`/api/external-tools` section of the API Guide. 
diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index d07b5b63f72..4ec011f2b24 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -18,7 +18,7 @@ If you find a typo or a small error in the documentation you can fix it using Gi - Under the **Write** tab, delete the long welcome message and write a few words about what you fixed. - Click **Create Pull Request**. -That's it! Thank you for your contribution! Your pull request will be added manually to the main Dataverse Project board at https://github.com/orgs/IQSS/projects/2 and will go through code review and QA before it is merged into the "develop" branch. Along the way, developers might suggest changes or make them on your behalf. Once your pull request has been merged you will be listed as a contributor at https://github.com/IQSS/dataverse/graphs/contributors +That's it! Thank you for your contribution! Your pull request will be added manually to the main Dataverse Project board at https://github.com/orgs/IQSS/projects/34 and will go through code review and QA before it is merged into the "develop" branch. Along the way, developers might suggest changes or make them on your behalf. Once your pull request has been merged you will be listed as a contributor at https://github.com/IQSS/dataverse/graphs/contributors Please see https://github.com/IQSS/dataverse/pull/5857 for an example of a quick fix that was merged (the "Files changed" tab shows how a typo was fixed). diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index 12f3d5b81fd..c36c7d1e963 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -142,7 +142,7 @@ Feedback on the pull request template we use is welcome! Here's an example of a Make Sure Your Pull Request Has Been Advanced to Code Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column at https://github.com/orgs/IQSS/projects/2. +Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column at https://github.com/orgs/IQSS/projects/34. Look at https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md for various ways to reach out to developers who have enough access to the GitHub repo to move your issue and pull request to the "Code Review" column. From 2593310b4746fa7022d62c6955db3e69b4d03471 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 17 Jan 2024 16:13:50 -0500 Subject: [PATCH 0553/1112] use "Community Backlog" as "dev efforts" #9157 --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 44f8ae65135..1430ba951a6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -64,4 +64,4 @@ Thanks for your contribution! 
[Community Call]: https://dataverse.org/community-calls [dataverse-dev Google Group]: https://groups.google.com/group/dataverse-dev [community contributors]: https://docs.google.com/spreadsheets/d/1o9DD-MQ0WkrYaEFTD5rF_NtyL8aUISgURsAXSL7Budk/edit?usp=sharing -[dev efforts]: https://github.com/orgs/IQSS/projects/2#column-5298405 +[dev efforts]: https://github.com/orgs/IQSS/projects/34/views/6 From 4f3a6ac3c038d920b7eb687a1eae6b7871e6eba8 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 18 Jan 2024 12:43:43 -0500 Subject: [PATCH 0554/1112] Add fix for SQL on guestbook service bean --- .../edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 01e6ecf7ff2..04f1ebf4bd0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -914,7 +914,7 @@ public void save(GuestbookResponse guestbookResponse) { public Long getDownloadCountByDataFileId(Long dataFileId) { // datafile id is null, will return 0 - Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId + "and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); + Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); return (Long) query.getSingleResult(); } From eb6da705e1c2dcf4e657326a09646a47bec8cb88 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 18 Jan 2024 14:11:37 -0500 Subject: [PATCH 0555/1112] Add fix for same issue on another query reported by Jim Myers --- .../edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 04f1ebf4bd0..6c043b78941 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -432,7 +432,7 @@ public Long findCountByGuestbookId(Long guestbookId, Long dataverseId) { Query query = em.createNativeQuery(queryString); return (Long) query.getSingleResult(); } else { - String queryString = "select count(o) from GuestbookResponse as o, Dataset d, DvObject obj where o.dataset_id = d.id and d.id = obj.id and obj.owner_id = " + dataverseId + "and o.guestbook_id = " + guestbookId; + String queryString = "select count(o) from GuestbookResponse as o, Dataset d, DvObject obj where o.dataset_id = d.id and d.id = obj.id and obj.owner_id = " + dataverseId + " and o.guestbook_id = " + guestbookId; Query query = em.createNativeQuery(queryString); return (Long) query.getSingleResult(); } From 867b7dcc8244e0ea4396ef1ef0dcadec40ce6b2c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 18 Jan 2024 14:58:14 -0500 Subject: [PATCH 0556/1112] a better test setup (#3322) --- .../harvard/iq/dataverse/api/HarvestingServerIT.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 
e0f121305e0..ed9cbdaaed0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -299,8 +299,7 @@ public void testSetEditAPIandOAIlistSets() throws InterruptedException { // expected HTTP result codes. String setName = UtilIT.getRandomString(6); - String persistentId = extraDatasetsIdentifiers.get(0); - String setDef = "dsPersistentId:"+persistentId; + String setDefinition = "title:Sample"; // Make sure the set does not exist String setPath = String.format("/api/harvest/server/oaisets/%s", setName); @@ -313,20 +312,21 @@ public void testSetEditAPIandOAIlistSets() throws InterruptedException { // Create the set as admin user Response createSetResponse = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) - .body(jsonForTestSpec(setName, setDef)) + .body(jsonForTestSpec(setName, setDefinition)) .post(createPath); assertEquals(201, createSetResponse.getStatusCode()); // I. Test the Modify/Edit (POST method) functionality of the // Dataverse OAI Sets API - String newDefinition = "title:New"; + String persistentId = extraDatasetsIdentifiers.get(0); + String newDefinition = "dsPersistentId:"+persistentId; String newDescription = "updated"; // API Test 1. Try to modify the set as normal user, should fail Response editSetResponse = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, normalUserAPIKey) - .body(jsonForEditSpec(setName, setDef, "")) + .body(jsonForEditSpec(setName, newDefinition, "")) .put(setPath); logger.info("non-admin user editSetResponse.getStatusCode(): " + editSetResponse.getStatusCode()); assertEquals(400, editSetResponse.getStatusCode()); From 091629a6b9db2a3d1b879817a162b4309c040d15 Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 19 Jan 2024 12:28:41 +0100 Subject: [PATCH 0557/1112] Add configuration for automatic XHTML/CSS/etc. reloading in IDEA in docker When running Dataverse in Docker we still want to be able to just edit things under src/main/webapp and then just reload the web page to see the changes. To do this: 1. Mapped Payara /opt/payara/appserver/glassfish/domains/domain1/applications folder to ./docker-dev-volumes/glassfish/applications 2. Added watchers.xml File watcher configuration, which can be imported into IDEA to ... 3. ... run cpwebapp.sh to copy changed files under src/main/webapp to ./docker-dev-volumes/glassfish/applications/dataverse-{current version} --- docker-compose-dev.yml | 2 ++ scripts/intellij/cpwebapp.sh | 33 +++++++++++++++++++++++++++++++++ scripts/intellij/watchers.xml | 22 ++++++++++++++++++++++ 3 files changed, 57 insertions(+) create mode 100755 scripts/intellij/cpwebapp.sh create mode 100644 scripts/intellij/watchers.xml diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 10fe62ff6df..76a4c8a745d 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -60,6 +60,8 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets + # Map the glassfish applications folder so that we can update webapp resources using scripts/intellij/cpwebapp.sh + - ./docker-dev-volumes/glassfish/applications:/opt/payara/appserver/glassfish/domains/domain1/applications # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). # Replace 6.0 with the current version. 
# - ./target/dataverse-6.0:/opt/payara/deployments/dataverse diff --git a/scripts/intellij/cpwebapp.sh b/scripts/intellij/cpwebapp.sh new file mode 100755 index 00000000000..6ecad367048 --- /dev/null +++ b/scripts/intellij/cpwebapp.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# +# cpwebapp +# +# Usage: +# +# Add a File watcher by importing watchers.xml into IntelliJ IDEA, and let it do the copying whenever you save a +# file under webapp. +# +# https://www.jetbrains.com/help/idea/settings-tools-file-watchers.html +# +# Alternatively, you can add an External tool and trigger via menu or shortcut to do the copying manually: +# +# https://www.jetbrains.com/help/idea/configuring-third-party-tools.html +# + +PROJECT_DIR=$1 +FILE_TO_COPY=$2 +RELATIVE_PATH="${FILE_TO_COPY#$PROJECT_DIR/}" + +# Check if RELATIVE_PATH starts with 'src/main/webapp', otherwise ignore +if [[ $RELATIVE_PATH == src/main/webapp* ]]; then + # Get current version. Any other way to do this? A simple VERSION file would help. + VERSION=`perl -ne 'print $1 if /(.*?)<\/revision>/' ./modules/dataverse-parent/pom.xml` + RELATIVE_PATH_WITHOUT_WEBAPP="${RELATIVE_PATH#src/main/webapp/}" + TARGET_DIR=./docker-dev-volumes/glassfish/applications/dataverse-$VERSION + TARGET_PATH="${TARGET_DIR}/${RELATIVE_PATH_WITHOUT_WEBAPP}" + + mkdir -p "$(dirname "$TARGET_PATH")" + cp "$FILE_TO_COPY" "$TARGET_PATH" + + echo "File $FILE_TO_COPY copied to $TARGET_PATH" +fi diff --git a/scripts/intellij/watchers.xml b/scripts/intellij/watchers.xml new file mode 100644 index 00000000000..e118fea558f --- /dev/null +++ b/scripts/intellij/watchers.xml @@ -0,0 +1,22 @@ + + + + + \ No newline at end of file From cb08667a77a2ea2a51093c81e6048ee9b5b1ef30 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 19 Jan 2024 15:10:17 -0500 Subject: [PATCH 0558/1112] #10249 correct typo in search API documentation --- doc/sphinx-guides/source/api/search.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index b941064f173..e8d0a0b3ea7 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -25,7 +25,7 @@ Parameters Name Type Description =============== ======= =========== q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data . For a list of fields to search, please see https://github.com/IQSS/dataverse/issues/2558 (for now). -type string Can be either "Dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset +type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset subtree string The identifier of the Dataverse collection to which the search should be narrowed. The subtree of this Dataverse collection and all its children will be searched. Multiple "subtree" parameters can be used to include multiple Dataverse collections. For example, https://demo.dataverse.org/api/search?q=data&subtree=birds&subtree=cats . sort string The sort field. 
Supported values include "name" and "date". See example under "order". order string The order in which to sort. Can either be "asc" or "desc". For example, https://demo.dataverse.org/api/search?q=data&sort=name&order=asc From fc28b37a9bdc847f04f1988f922a1414b1c70527 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Mon, 22 Jan 2024 13:17:38 -0500 Subject: [PATCH 0559/1112] bump google.library.version to 26.30.0 per Jim --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index e2d1ceec539..386d4934cb1 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -152,7 +152,7 @@ 42.6.0 9.3.0 1.12.290 - 26.29.0 + 26.30.0 8.0.0 From f902a3ec75a6ca1d23d81f02585902b5873c1fbd Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 18 Jan 2024 15:55:32 -0500 Subject: [PATCH 0560/1112] add API endpoint to return file citation #10240 --- .../edu/harvard/iq/dataverse/api/Files.java | 18 +++++++++ .../edu/harvard/iq/dataverse/api/FilesIT.java | 40 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 ++++ 3 files changed, 65 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 5d400ee1438..f4282b794b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -2,6 +2,7 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; +import edu.harvard.iq.dataverse.DataCitation; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.DataFileTag; @@ -931,4 +932,21 @@ public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathPar return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); }, getRequestUser(crc)); } + + @GET + @AuthRequired + @Path("{id}/citation") + public Response getFileCitation(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId) { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + final DataFile df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); + FileMetadata fm = df.getLatestFileMetadata(); + boolean direct = false; + DataCitation citation = new DataCitation(fm, direct); + return ok(citation.toString(true)); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 915f82a6de2..853d92aac0e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -33,6 +33,7 @@ import jakarta.json.JsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.*; +import java.time.Year; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; @@ -2483,4 +2484,43 @@ public void testCollectionStorageQuotas() { UtilIT.deleteSetting(SettingsServiceBean.Key.UseStorageQuotas); } + + @Test + public void getFileCitation() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + + Path pathToTxt = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "file.txt"); + String contentOfTxt = "foobar"; + java.nio.file.Files.write(pathToTxt, contentOfTxt.getBytes()); + + Response uploadFileTxt = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTxt.toString(), apiToken); + uploadFileTxt.prettyPrint(); + uploadFileTxt.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("file.txt")); + + Integer fileId = JsonPath.from(uploadFileTxt.body().asString()).getInt("data.files[0].dataFile.id"); + + String pidAsUrl = "https://doi.org/" + datasetPid.split("doi:")[1]; + int currentYear = Year.now().getValue(); + + Response getFileCitation = UtilIT.getFileCitation(fileId, true, apiToken); + getFileCitation.prettyPrint(); + getFileCitation.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; file.txt [fileName]")); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 6af3f8a0a09..9b9f8ddff47 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3459,6 +3459,13 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, boo return response; } + static Response getFileCitation(Integer fileId, boolean getDraft, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + fileId + "/citation"); + return response; + } + static Response getVersionFiles(Integer datasetId, String version, Integer limit, From 85018f5182fbdd8f59dad75e9e9612ac7c657c54 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 22 Jan 2024 10:42:28 -0500 Subject: [PATCH 0561/1112] make assertings on draft vs published #10240 --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 23 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 ++++---- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 853d92aac0e..49e6c5c4f22 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2516,11 +2516,34 @@ public void getFileCitation() throws IOException { String pidAsUrl = "https://doi.org/" + datasetPid.split("doi:")[1]; int currentYear = Year.now().getValue(); + Response draftUnauthNoApitoken = UtilIT.getFileCitation(fileId, true, null); + draftUnauthNoApitoken.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + Response createNoPermsUser = UtilIT.createRandomUser(); + createNoPermsUser.then().assertThat().statusCode(OK.getStatusCode()); + String noPermsApiToken = 
UtilIT.getApiTokenFromResponse(createNoPermsUser); + + Response draftUnauthNoPermsApiToken = UtilIT.getFileCitation(fileId, true, noPermsApiToken); + draftUnauthNoPermsApiToken.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + Response getFileCitation = UtilIT.getFileCitation(fileId, true, apiToken); getFileCitation.prettyPrint(); getFileCitation.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; file.txt [fileName]")); + + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishedNoApiTokenNeeded = UtilIT.getFileCitation(fileId, true, null); + publishedNoApiTokenNeeded.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishedNoPermsApiTokenAllowed = UtilIT.getFileCitation(fileId, true, noPermsApiToken); + publishedNoPermsApiTokenAllowed.then().assertThat().statusCode(OK.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9b9f8ddff47..946bc6d5c83 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3460,10 +3460,11 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, boo } static Response getFileCitation(Integer fileId, boolean getDraft, String apiToken) { - Response response = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/files/" + fileId + "/citation"); - return response; + var spec = given(); + if (apiToken != null) { + spec.header(API_TOKEN_HTTP_HEADER, apiToken); + } + return spec.get("/api/files/" + fileId + "/citation"); } static Response getVersionFiles(Integer datasetId, From f34f82be7281e05cf80cee461ed948187f0a537b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 22 Jan 2024 16:20:43 -0500 Subject: [PATCH 0562/1112] handle versions for "get data file citation" API #10240 --- .../edu/harvard/iq/dataverse/api/Files.java | 48 +++++++++++++++++-- .../edu/harvard/iq/dataverse/api/FilesIT.java | 45 ++++++++++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 4 +- 3 files changed, 85 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index f4282b794b1..c30503199e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; @@ -28,11 +29,16 @@ import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; import edu.harvard.iq.dataverse.datasetutility.NoFilesException; import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; +import 
edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.impl.GetDataFileCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetDraftFileMetadataIfAvailableCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.RedetectFileTypeCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand; @@ -933,14 +939,50 @@ public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathPar }, getRequestUser(crc)); } + /** + * @param fileIdOrPersistentId Database ID or PID of the data file. + * @param dsVersionString The version of the dataset, such as 1.0, :draft, + * :latest-published, etc. + */ @GET @AuthRequired - @Path("{id}/citation") - public Response getFileCitation(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId) { + @Path("{id}/versions/{dsVersionString}/citation") + public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("dsVersionString") String dsVersionString) { try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); final DataFile df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); - FileMetadata fm = df.getLatestFileMetadata(); + Dataset ds = df.getOwner(); + // Adapted from getDatasetVersionOrDie + // includeDeaccessioned and checkPermsWhenDeaccessioned were removed + // because they aren't needed. 
+ DatasetVersion dsv = execCommand(handleVersion(dsVersionString, new Datasets.DsVersionHandler>() { + + @Override + public Command handleLatest() { + return new GetLatestAccessibleDatasetVersionCommand(req, ds); + } + + @Override + public Command handleDraft() { + return new GetDraftDatasetVersionCommand(req, ds); + } + + @Override + public Command handleSpecific(long major, long minor) { + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + } + + @Override + public Command handleLatestPublished() { + return new GetLatestPublishedDatasetVersionCommand(req, ds); + } + })); + + Long getDatasetVersionID = dsv.getId(); + FileMetadata fm = dataFileServiceBean.findFileMetadataByDatasetVersionIdAndDataFileId(getDatasetVersionID, df.getId()); + if (fm == null) { + return notFound("File could not be found."); + } boolean direct = false; DataCitation citation = new DataCitation(fm, direct); return ok(citation.toString(true)); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 49e6c5c4f22..4bc7456e7e7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.io.File; import java.io.IOException; @@ -2501,6 +2502,13 @@ public void getFileCitation() throws IOException { Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, false, apiToken); + getDatasetVersionCitationResponse.prettyPrint(); + getDatasetVersionCitationResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + // We check that the returned message contains information expected for the citation string + .body("data.message", containsString("DRAFT VERSION")); + Path pathToTxt = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "file.txt"); String contentOfTxt = "foobar"; java.nio.file.Files.write(pathToTxt, contentOfTxt.getBytes()); @@ -2516,19 +2524,21 @@ public void getFileCitation() throws IOException { String pidAsUrl = "https://doi.org/" + datasetPid.split("doi:")[1]; int currentYear = Year.now().getValue(); - Response draftUnauthNoApitoken = UtilIT.getFileCitation(fileId, true, null); + Response draftUnauthNoApitoken = UtilIT.getFileCitation(fileId, DS_VERSION_DRAFT, null); + draftUnauthNoApitoken.prettyPrint(); draftUnauthNoApitoken.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); Response createNoPermsUser = UtilIT.createRandomUser(); createNoPermsUser.then().assertThat().statusCode(OK.getStatusCode()); String noPermsApiToken = UtilIT.getApiTokenFromResponse(createNoPermsUser); - Response draftUnauthNoPermsApiToken = UtilIT.getFileCitation(fileId, true, noPermsApiToken); + Response draftUnauthNoPermsApiToken = UtilIT.getFileCitation(fileId, DS_VERSION_DRAFT, noPermsApiToken); + draftUnauthNoPermsApiToken.prettyPrint(); draftUnauthNoPermsApiToken.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); - Response getFileCitation = UtilIT.getFileCitation(fileId, true, apiToken); - getFileCitation.prettyPrint(); - 
getFileCitation.then().assertThat() + Response getFileCitationDraft = UtilIT.getFileCitation(fileId, DS_VERSION_DRAFT, apiToken); + getFileCitationDraft.prettyPrint(); + getFileCitationDraft.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; file.txt [fileName]")); @@ -2538,12 +2548,33 @@ public void getFileCitation() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response publishedNoApiTokenNeeded = UtilIT.getFileCitation(fileId, true, null); + Response publishedNoApiTokenNeeded = UtilIT.getFileCitation(fileId, "1.0", null); publishedNoApiTokenNeeded.then().assertThat().statusCode(OK.getStatusCode()); - Response publishedNoPermsApiTokenAllowed = UtilIT.getFileCitation(fileId, true, noPermsApiToken); + Response publishedNoPermsApiTokenAllowed = UtilIT.getFileCitation(fileId, "1.0", noPermsApiToken); publishedNoPermsApiTokenAllowed.then().assertThat().statusCode(OK.getStatusCode()); + String updateJsonString = """ +{ + "label": "foo.txt" +} +"""; + + Response updateMetadataResponse = UtilIT.updateFileMetadata(fileId.toString(), updateJsonString, apiToken); + updateMetadataResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), updateMetadataResponse.getStatusCode()); + + Response getFileCitationPostV1Draft = UtilIT.getFileCitation(fileId, DS_VERSION_DRAFT, apiToken); + getFileCitationPostV1Draft.prettyPrint(); + getFileCitationPostV1Draft.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; foo.txt [fileName]")); + + Response getFileCitationV1Filename = UtilIT.getFileCitation(fileId, "1.0", apiToken); + getFileCitationV1Filename.prettyPrint(); + getFileCitationV1Filename.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1; file.txt [fileName]")); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 946bc6d5c83..520d68428a3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3459,12 +3459,12 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, boo return response; } - static Response getFileCitation(Integer fileId, boolean getDraft, String apiToken) { + static Response getFileCitation(Integer fileId, String datasetVersion, String apiToken) { var spec = given(); if (apiToken != null) { spec.header(API_TOKEN_HTTP_HEADER, apiToken); } - return spec.get("/api/files/" + fileId + "/citation"); + return spec.get("/api/files/" + fileId + "/versions/" + datasetVersion + "/citation"); } static Response getVersionFiles(Integer datasetId, From a28e15a9316cb1f4d726ddd0afee6cd817324c3b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 23 Jan 2024 10:22:55 -0500 Subject: [PATCH 0563/1112] #9686 display harvesting client info on cards of harvested objects --- .../iq/dataverse/DatasetServiceBean.java | 48 ------------------- .../iq/dataverse/DvObjectServiceBean.java | 48 +++++++++++++++++++ .../search/SearchIncludeFragment.java | 41 ++++++++++------ 
.../harvard/iq/dataverse/api/DatasetsIT.java | 2 + 4 files changed, 76 insertions(+), 63 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index c6df2a2e1ab..4c4aafdd1ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -583,54 +583,6 @@ public Long getDatasetVersionCardImage(Long versionId, User user) { return null; } - /** - * Used to identify and properly display Harvested objects on the dataverse page. - * - * @param datasetIds - * @return - */ - public Map getArchiveDescriptionsForHarvestedDatasets(Set datasetIds){ - if (datasetIds == null || datasetIds.size() < 1) { - return null; - } - - String datasetIdStr = StringUtils.join(datasetIds, ", "); - - String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")"; - List searchResults; - - try { - searchResults = em.createNativeQuery(qstr).getResultList(); - } catch (Exception ex) { - searchResults = null; - } - - if (searchResults == null) { - return null; - } - - Map ret = new HashMap<>(); - - for (Object[] result : searchResults) { - Long dsId; - if (result[0] != null) { - try { - dsId = (Long)result[0]; - } catch (Exception ex) { - dsId = null; - } - if (dsId == null) { - continue; - } - - ret.put(dsId, (String)result[1]); - } - } - - return ret; - } - - public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User user) { if (datasetVersion == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java index d4219c36149..58a246b364a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java @@ -383,6 +383,54 @@ public Map getObjectPathsByIds(Set objectIds){ return ret; } + /** + * Used to identify and properly display Harvested objects on the dataverse page. 
+ * + * @param dvObjectIds + * @return + */ + public Map getArchiveDescriptionsForHarvestedDvObjects(Set dvObjectIds){ + + if (dvObjectIds == null || dvObjectIds.size() < 1) { + return null; + } + + String dvObjectIsString = StringUtils.join(dvObjectIds, ", "); + String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, DvObject d WHERE d.harvestingClient_id = h.id AND d.id IN (" + dvObjectIsString + ")"; + List searchResults; + + try { + searchResults = em.createNativeQuery(qstr).getResultList(); + } catch (Exception ex) { + searchResults = null; + } + + if (searchResults == null) { + return null; + } + + Map ret = new HashMap<>(); + + for (Object[] result : searchResults) { + Long dvObjId; + if (result[0] != null) { + try { + Integer castResult = (Integer) result[0]; + dvObjId = Long.valueOf(castResult); + } catch (Exception ex) { + dvObjId = null; + } + if (dvObjId == null) { + continue; + } + ret.put(dvObjId, (String)result[1]); + } + } + + return ret; + } + + public String generateNewIdentifierByStoredProcedure() { StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure"); query.execute(); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 5a5d8781726..939b39b94ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -1367,6 +1367,7 @@ public boolean canPublishDataset(Long datasetId){ public void setDisplayCardValues() { Set harvestedDatasetIds = null; + Set harvestedFileIds = null; for (SolrSearchResult result : searchResultsList) { //logger.info("checking DisplayImage for the search result " + i++); if (result.getType().equals("dataverses")) { @@ -1392,10 +1393,10 @@ public void setDisplayCardValues() { } else if (result.getType().equals("files")) { result.setImageUrl(thumbnailServiceWrapper.getFileCardImageAsBase64Url(result)); if (result.isHarvested()) { - if (harvestedDatasetIds == null) { - harvestedDatasetIds = new HashSet<>(); + if (harvestedFileIds == null) { + harvestedFileIds = new HashSet<>(); } - harvestedDatasetIds.add(result.getParentIdAsLong()); + harvestedFileIds.add(result.getEntityId()); } } } @@ -1407,25 +1408,35 @@ public void setDisplayCardValues() { // SQL query: if (harvestedDatasetIds != null) { - Map descriptionsForHarvestedDatasets = datasetService.getArchiveDescriptionsForHarvestedDatasets(harvestedDatasetIds); - if (descriptionsForHarvestedDatasets != null && descriptionsForHarvestedDatasets.size() > 0) { + Map descriptionsForHarvestedDatasets = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedDatasetIds); + if (descriptionsForHarvestedDatasets != null && !descriptionsForHarvestedDatasets.isEmpty()) { for (SolrSearchResult result : searchResultsList) { - if (result.isHarvested()) { - if (result.getType().equals("files")) { - if (descriptionsForHarvestedDatasets.containsKey(result.getParentIdAsLong())) { - result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getParentIdAsLong())); - } - } else if (result.getType().equals("datasets")) { - if (descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) { - result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId())); - } - } + if (result.isHarvested() && result.getType().equals("datasets") && 
descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) { + result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId())); } } } descriptionsForHarvestedDatasets = null; harvestedDatasetIds = null; } + + if (harvestedFileIds != null) { + + Map descriptionsForHarvestedFiles = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedFileIds); + if (descriptionsForHarvestedFiles != null && !descriptionsForHarvestedFiles.isEmpty()) { + for (SolrSearchResult result : searchResultsList) { + if (result.isHarvested() && result.getType().equals("files") && descriptionsForHarvestedFiles.containsKey(result.getEntityId())) { + + result.setHarvestingDescription(descriptionsForHarvestedFiles.get(result.getEntityId())); + + } + } + } + descriptionsForHarvestedFiles = null; + harvestedDatasetIds = null; + + } + // determine which of the objects are linked: diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 9b51be4b365..087db4858b2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2548,6 +2548,8 @@ public void testLinkingDatasets() { EntityManager entityManager = entityManagerFactory.createEntityManager(); entityManager.getTransaction().begin(); // Do stuff... + //SEK 01/22/2024 - as of 6.2 harvestingclient_id will be on the dv object table + // so if this is ever implemented change will probably need to happen in the updatequery below entityManager.createNativeQuery("UPDATE dataset SET harvestingclient_id=1 WHERE id="+datasetId2).executeUpdate(); entityManager.getTransaction().commit(); entityManager.close(); From 88bae3bb295c26e7eda57d1ad5fbb34b67788542 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 23 Jan 2024 10:59:46 -0500 Subject: [PATCH 0564/1112] #9686 fix script names --- ...emetadata.sql => V6.1.0.1__9728-universe-variablemetadata.sql} | 0 ...gclient-id.sql => V6.1.0.2__9686-move-harvestingclient-id.sql} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.3__9728-universe-variablemetadata.sql => V6.1.0.1__9728-universe-variablemetadata.sql} (100%) rename src/main/resources/db/migration/{V6.1.0.1__9686-move-harvestingclient-id.sql => V6.1.0.2__9686-move-harvestingclient-id.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.3__9728-universe-variablemetadata.sql b/src/main/resources/db/migration/V6.1.0.1__9728-universe-variablemetadata.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.3__9728-universe-variablemetadata.sql rename to src/main/resources/db/migration/V6.1.0.1__9728-universe-variablemetadata.sql diff --git a/src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.1__9686-move-harvestingclient-id.sql rename to src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql From 7d27a9b64736780314ed3a203990d701db2ab399 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 23 Jan 2024 11:17:50 -0500 Subject: [PATCH 0565/1112] #10255 fix script name --- ...emetadata.sql => V6.1.0.1__9728-universe-variablemetadata.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.3__9728-universe-variablemetadata.sql => 
V6.1.0.1__9728-universe-variablemetadata.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.3__9728-universe-variablemetadata.sql b/src/main/resources/db/migration/V6.1.0.1__9728-universe-variablemetadata.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.3__9728-universe-variablemetadata.sql rename to src/main/resources/db/migration/V6.1.0.1__9728-universe-variablemetadata.sql From c999ac7721e4202a36790c23ab8acadf95b6ba8c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 23 Jan 2024 11:28:04 -0500 Subject: [PATCH 0566/1112] handle deaccessioned versions #10240 --- .../edu/harvard/iq/dataverse/api/Files.java | 11 ++++-- .../edu/harvard/iq/dataverse/api/FilesIT.java | 34 ++++++++++++++++--- 2 files changed, 38 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index c30503199e0..ed331e6835d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -953,10 +953,11 @@ public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @ final DataFile df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); Dataset ds = df.getOwner(); // Adapted from getDatasetVersionOrDie - // includeDeaccessioned and checkPermsWhenDeaccessioned were removed - // because they aren't needed. DatasetVersion dsv = execCommand(handleVersion(dsVersionString, new Datasets.DsVersionHandler>() { + boolean includeDeaccessioned = true; + boolean checkPermsWhenDeaccessioned = true; + @Override public Command handleLatest() { return new GetLatestAccessibleDatasetVersionCommand(req, ds); @@ -969,7 +970,7 @@ public Command handleDraft() { @Override public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkPermsWhenDeaccessioned); } @Override @@ -978,6 +979,10 @@ public Command handleLatestPublished() { } })); + if (dsv == null) { + return unauthorized("Dataset version cannot be found or unauthorized."); + } + Long getDatasetVersionID = dsv.getId(); FileMetadata fm = dataFileServiceBean.findFileMetadataByDatasetVersionIdAndDataFileId(getDatasetVersionID, df.getId()); if (fm == null) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 4bc7456e7e7..1e8a806faa2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2487,7 +2487,7 @@ public void testCollectionStorageQuotas() { } @Test - public void getFileCitation() throws IOException { + public void testFileCitation() throws IOException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -2570,11 +2570,37 @@ public void getFileCitation() throws IOException { .statusCode(OK.getStatusCode()) .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; foo.txt [fileName]")); - Response getFileCitationV1Filename = UtilIT.getFileCitation(fileId, "1.0", apiToken); - getFileCitationV1Filename.prettyPrint(); - getFileCitationV1Filename.then().assertThat() + Response getFileCitationV1OldFilename = 
UtilIT.getFileCitation(fileId, "1.0", apiToken); + getFileCitationV1OldFilename.prettyPrint(); + getFileCitationV1OldFilename.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1; file.txt [fileName]")); + + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken) + .then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDataset = UtilIT.deaccessionDataset(datasetId, "1.0", "just because", "http://example.com", apiToken); + deaccessionDataset.prettyPrint(); + deaccessionDataset.then().assertThat().statusCode(OK.getStatusCode()); + + Response getFileCitationV1PostDeaccessionAuthor = UtilIT.getFileCitation(fileId, "1.0", apiToken); + getFileCitationV1PostDeaccessionAuthor.prettyPrint(); + getFileCitationV1PostDeaccessionAuthor.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1, DEACCESSIONED VERSION; file.txt [fileName]")); + + Response getFileCitationV1PostDeaccessionNoApiToken = UtilIT.getFileCitation(fileId, "1.0", null); + getFileCitationV1PostDeaccessionNoApiToken.prettyPrint(); + getFileCitationV1PostDeaccessionNoApiToken.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()) + .body("message", equalTo("Dataset version cannot be found or unauthorized.")); + + Response getFileCitationV1PostDeaccessionNoPermsUser = UtilIT.getFileCitation(fileId, "1.0", noPermsApiToken); + getFileCitationV1PostDeaccessionNoPermsUser.prettyPrint(); + getFileCitationV1PostDeaccessionNoPermsUser.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()) + .body("message", equalTo("Dataset version cannot be found or unauthorized.")); + } } From 89b7f277ccddfc849611d7e08c16fcd3b2af3dcc Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 23 Jan 2024 13:46:16 -0500 Subject: [PATCH 0567/1112] Fix the issue with the thumbnail size --- src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java | 2 +- src/main/webapp/resources/css/structure.css | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index ccf861ebdc8..03a0044a987 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -464,7 +464,7 @@ public static InputStream getLogoAsInputStream(Dataset dataset) { try { in = ImageThumbConverter.getImageThumbnailAsInputStream(thumbnailFile.getStorageIO(), - ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE).getInputStream(); + ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE).getInputStream(); } catch (IOException ioex) { logger.warning("getLogo(): Failed to get logo from DataFile for " + dataset.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index 470c07d4534..b81cf2a2c47 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -483,7 +483,7 @@ span.search-term-match {font-weight: bold;} [id$='resultsTable'] div.card-title-icon-block span.label {vertical-align:15%} [id$='resultsTable'] div.card-preview-icon-block {width:48px; float:left; margin:4px 12px 6px 0;} [id$='resultsTable'] div.card-preview-icon-block a {display:block; 
height:48px; line-height:48px;} -[id$='resultsTable'] div.card-preview-icon-block img {vertical-align:middle;} +[id$='resultsTable'] div.card-preview-icon-block img {vertical-align:middle; max-width: 64px; max-height: 48px; padding-right: 10px;} [id$='resultsTable'] div.card-preview-icon-block span[class^='icon'], [id$='resultsTable'] div.card-preview-icon-block span[class^='glyphicon'] {font-size:2.8em;} From d40ecfd420bc34df884a6b4e820946f0f457c6be Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 23 Jan 2024 13:57:09 -0500 Subject: [PATCH 0568/1112] add docs for "get file citation" API #10240 --- doc/sphinx-guides/source/api/native-api.rst | 53 ++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index dbe769e2fd1..f161dd67ca9 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -845,7 +845,12 @@ Datasets **Note** Creation of new datasets is done with a ``POST`` onto a Dataverse collection. See the Dataverse Collections section above. -**Note** In all commands below, dataset versions can be referred to as: +.. _dataset-version-specifiers: + +Dataset Version Specifiers +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In all commands below, dataset versions can be referred to as: * ``:draft`` the draft version, if any * ``:latest`` either a draft (if exists) or the latest published version. @@ -2712,6 +2717,8 @@ The fully expanded example above (without environment variables) looks like this Files ----- +.. _get-json-rep-of-file: + Get JSON Representation of a File ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3499,6 +3506,50 @@ The fully expanded example above (without environment variables) looks like this You can download :download:`dct.xml <../../../../src/test/resources/xml/dct.xml>` from the example above to see what the XML looks like. +Get File Citation as JSON +~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API is for getting the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. + +To specify the version, you can use ``:latest-published`` or ``:draft`` or ``1.0`` or any other style listed under :ref:`dataset-version-specifiers`. + +When the dataset version is published, authentication is not required: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export FILE_ID=42 + export DATASET_VERSION=":latest-published" + + curl "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/42/versions/:latest-published/citation" + +When the dataset version is a draft or deaccessioned, authentication is required: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export FILE_ID=42 + export DATASET_VERSION=":draft" + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/42/versions/:draft/citation + +If your file has a persistent identifier (PID, such as a DOI), you can pass it using the technique described under :ref:`get-json-rep-of-file`. 
+ +This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in https://github.com/IQSS/dataverse/issues/3140 and https://github.com/IQSS/dataverse/issues/9994. + Provenance ~~~~~~~~~~ From 521afc50d807aacb39a74166c303d61fe5f64b2d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 23 Jan 2024 13:59:32 -0500 Subject: [PATCH 0569/1112] add release note for "get file citation" API #10240 --- doc/release-notes/10240-file-citation.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 doc/release-notes/10240-file-citation.md diff --git a/doc/release-notes/10240-file-citation.md b/doc/release-notes/10240-file-citation.md new file mode 100644 index 00000000000..fb747527669 --- /dev/null +++ b/doc/release-notes/10240-file-citation.md @@ -0,0 +1,5 @@ +## Get file citation as JSON + +It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. + +This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in https://github.com/IQSS/dataverse/issues/3140 and https://github.com/IQSS/dataverse/issues/9994 From 59690d4c9a2b5686e3b38f07c634fb32323400ff Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 09:55:46 -0500 Subject: [PATCH 0570/1112] emphasize need to check flyway number before merging #10101 --- .../source/developers/sql-upgrade-scripts.rst | 2 ++ doc/sphinx-guides/source/qa/qa-workflow.md | 8 +++++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/sql-upgrade-scripts.rst b/doc/sphinx-guides/source/developers/sql-upgrade-scripts.rst index bace682b1b8..4689aeec0f2 100644 --- a/doc/sphinx-guides/source/developers/sql-upgrade-scripts.rst +++ b/doc/sphinx-guides/source/developers/sql-upgrade-scripts.rst @@ -21,6 +21,8 @@ If you are creating a new database table (which maps to an ``@Entity`` in JPA), If you are doing anything other than creating a new database table such as adding a column to an existing table, you must create or update a SQL upgrade script. +.. _create-sql-script: + How to Create a SQL Upgrade Script ---------------------------------- diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index df274d2405d..cb047a3086a 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -27,9 +27,11 @@ Same as for doc, just a heads up to an admin for something of note or especially upgrade instructions as needed. -1. Does it use a DB, Flyway script? +1. Does it include a database migration script (Flyway)? - Good to know since it may collide with another existing one by version or it could be a one way transform of your DB so back up your test DB before. Also, happens during deployment so be on the lookout for any issues. + First, check the numbering in the filename of the script. It must be in line with the rules defined at {ref}`create-sql-script`. If the number is out of date (very common for older pull requests), do not merge and ask the developer to rename the script. Otherwise, deployment will fail. + + Once you're sure the numbering is ok (the next available number, basically), back up your database and proceeed with testing. 1. Validate the documentation. @@ -94,4 +96,4 @@ 1. Delete merged branch - Just a housekeeping move if the PR is from IQSS. 
Click the delete branch button where the merge button had been. There is no deletion for outside contributions. \ No newline at end of file + Just a housekeeping move if the PR is from IQSS. Click the delete branch button where the merge button had been. There is no deletion for outside contributions. From 5292682d6724e1b24cb4001768ce82d97d8dc771 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 24 Jan 2024 12:05:09 -0500 Subject: [PATCH 0571/1112] fix for #10251 - sync terms popup required code --- .../harvard/iq/dataverse/util/FileUtil.java | 30 +++---------------- 1 file changed, 4 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 776d04e98cc..8decf74fe13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1199,34 +1199,12 @@ public static boolean isGuestbookPopupRequired(DatasetVersion datasetVersion) { } public static boolean isTermsPopupRequired(DatasetVersion datasetVersion) { - - if (datasetVersion == null) { - logger.fine("TermsPopup not required because datasetVersion is null."); - return false; - } - //0. if version is draft then Popup "not required" - if (!datasetVersion.isReleased()) { - logger.fine("TermsPopup not required because datasetVersion has not been released."); + Boolean answer = popupDueToStateOrTerms(datasetVersion); + if(answer == null) { + logger.fine("TermsPopup is not required."); return false; } - // 1. License and Terms of Use: - if (datasetVersion.getTermsOfUseAndAccess() != null) { - if (!License.CC0.equals(datasetVersion.getTermsOfUseAndAccess().getLicense()) - && !(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null - || datasetVersion.getTermsOfUseAndAccess().getTermsOfUse().equals(""))) { - logger.fine("TermsPopup required because of license or terms of use."); - return true; - } - - // 2. 
Terms of Access: - if (!(datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess() == null) && !datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess().equals("")) { - logger.fine("TermsPopup required because of terms of access."); - return true; - } - } - - logger.fine("TermsPopup is not required."); - return false; + return answer; } /** From 51984163525453b7360dd0b89db8746b8d55c031 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 24 Jan 2024 13:04:33 -0500 Subject: [PATCH 0572/1112] fix null issue found in #10251 --- .../java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index ca3f5b4bded..de3f4d2ab56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -316,7 +316,7 @@ private void redirectToDownloadAPI(String downloadType, Long fileId, boolean gue Long fileMetadataId) { String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten, fileMetadataId); - if (downloadType.equals("GlobusTransfer")) { + if ("GlobusTransfer".equals(downloadType)) { PrimeFaces.current().executeScript(URLTokenUtil.getScriptForUrl(fileDownloadUrl)); } else { logger.fine("Redirecting to file download url: " + fileDownloadUrl); From 96f2c95a26f6bf9d153a0b95f6cea7bdac7bd4ea Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 14:40:12 -0500 Subject: [PATCH 0573/1112] minor tweaks #10101 --- .../source/developers/making-releases.rst | 2 ++ doc/sphinx-guides/source/qa/overview.md | 12 +++---- .../source/qa/performance-tests.md | 6 ++-- doc/sphinx-guides/source/qa/qa-workflow.md | 14 ++++---- .../source/qa/test-automation.md | 35 ++++++++++--------- .../source/qa/testing-approach.md | 14 ++++---- .../source/qa/testing-infrastructure.md | 4 +-- 7 files changed, 45 insertions(+), 42 deletions(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 6b94282d55e..18ae34ee656 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -83,6 +83,8 @@ To test these images against our API test suite, go to the "alpha" workflow at h If there are failures, additional dependencies or settings may have been added to the "develop" workflow. Copy them over and try again. +.. _build-guides: + Build the Guides for the Release -------------------------------- diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index 01ab629db8c..f8eb7b19297 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -15,17 +15,17 @@ The basic workflow is as follows. Bugs or feature requests are submitted to GitH Before a pull request is moved to QA, it must be reviewed by a member of the development team from a coding perspective, and it must pass automated tests. There it is tested manually, exercising the UI (using three common browsers) and any business logic it implements. -Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. 
Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Done column and the process repeats with the next PR until it is decided to {doc}`make a release `. +Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop branch, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Merged column (where it might be discussed at the next standup) and the process repeats with the next PR until it is decided to {doc}`make a release `. ## Tips and Tricks - Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (on Google Drive). This potentially will help with future testing. - When in doubt, ask someone. If you are confused about how something is working, it may be something you have missed, or it could be a documentation issue, or it could be a bug! Talk to the code reviewer and the contributor/developer for their opinion and advice. -- Always tail the server.log file while testing. Open a terminal window to the test instance and `tail -F server.log`. This helps you get a real-time sense of what the server is doing when you act and makes it easier to identify any stack trace on failure. -- When overloaded, do the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. -- When testing a bug fix, try reproducing the bug on the demo before testing the fix, that way you know you are taking the correct steps to verify that the fix worked. +- Always tail the server.log file while testing. Open a terminal window to the test instance and `tail -F server.log`. This helps you get a real-time sense of what the server is doing when you interact with the application and makes it easier to identify any stack trace on failure. +- When overloaded, QA the simple pull requests first to reduce the queue. It gives you a mental boost to complete something and reduces the perception of the amount of work still to be done. +- When testing a bug fix, try reproducing the bug on the demo server before testing the fix. That way you know you are taking the correct steps to verify that the fix worked. - When testing an optional feature that requires configuration, do a smoke test without the feature configured and then with it configured. That way you know that folks using the standard config are unaffected by the option if they choose not to configure it. 
-- Back up your DB before applying an irreversible DB update and you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. +- Back up your DB before applying an irreversible DB update when you are using a persistent/reusable platform. Just in case it fails, and you need to carry on testing something else you can use the backup. ## Release Cadence and Sprints @@ -41,4 +41,4 @@ This type of approach is often used to give contributing developers confidence t ## Making a Release -See {doc}`/developers/making-releases` in the Developer Guide. \ No newline at end of file +See {doc}`/developers/making-releases` in the Developer Guide. diff --git a/doc/sphinx-guides/source/qa/performance-tests.md b/doc/sphinx-guides/source/qa/performance-tests.md index 3fab0386eb0..404188735a2 100644 --- a/doc/sphinx-guides/source/qa/performance-tests.md +++ b/doc/sphinx-guides/source/qa/performance-tests.md @@ -7,7 +7,7 @@ ## Introduction -The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming it is done once near the end. Using a load-generating tool named {ref}`Locust `, it loads the statistically most loaded pages, according to Google Analytics, that is 50% homepage and 50% some type of dataset page. +The final testing activity before producing a release is performance testing. This could be done throughout the release cycle but since it is time-consuming, it is done once near the end. Using a load-generating tool named {ref}`Locust `, our scripts load the statistically most-loaded pages (according to Google Analytics): 50% homepage and 50% some type of dataset page. Since dataset page weight also varies by the number of files, a selection of about 10 datasets with varying file counts is used. The pages are called randomly as a guest user with increasing levels of user load, from 1 user to 250 users. Typical daily loads in production are around the 50-user level. Though the simulated user level does have a modest amount of random think time before repeated calls, from 5-20 seconds, it is not a real-world load so direct comparisons to production are not reliable. Instead, we compare performance to prior versions of the product, and based on how that performed in production we have some idea whether this might be similar in performance or whether there is some undetected issue that appears under load, such as inefficient or too many DB queries per page. @@ -19,11 +19,11 @@ Once the performance has been tested and recorded in a [Google spreadsheet](http ## Access -Access to performance cluster instances requires ssh keys. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. Access to the demo requires an ssh key, see Leonid. +Access to performance cluster instances requires ssh keys. The cluster itself is normally not running to reduce costs. To turn on the cluster, log on to the demo server and run the perfenv scripts from the centos default user dir. ## Special Notes ⚠️ -Please note the performance database is also used occasionally by Julian and the Curation team to generate prod reports so a courtesy check with Julian would be good before taking over the env. 
+Please note the performance database is also used occasionally by members of the Curation team to generate prod reports so a courtesy check with them would be good before taking over the env. Executing the Performance Script diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index cb047a3086a..3db17ecb8a4 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -23,9 +23,9 @@ Small changes or fixes usually don’t have docs but new features or extensions of a feature or new configuration options should have documentation. -1. Does it have or need release notes? +1. Does it have or need a release note snippet? - Same as for doc, just a heads up to an admin for something of note or especially upgrade instructions as needed. + Same as for doc, just a heads up to an admin for something of note or especially upgrade instructions as needed. See also {ref}`writing-release-note-snippets` for what to expect in a release note snippet. 1. Does it include a database migration script (Flyway)? @@ -35,7 +35,7 @@ 1. Validate the documentation. - Build the doc using Jenkins, does it build without errors? + Build the doc using Jenkins or read the automated Read the Docs preview. Does it build without errors? Read it through for sense. Use it for test cases and to understand the feature. @@ -88,11 +88,11 @@ Click the "Merge pull request" button and be sure to use the "Create a merge commit" option to include this PR into the common develop branch. - Some of the reasons why we encourage using option over Rebase or Squash are: + Some of the reasons why we encourage using this option over Rebase or Squash are: - -Preserving commit history - -Clearer context and treaceability - -Easier collaboration, bug tracking and reverting + - Preservation of commit history + - Clearer context and treaceability + - Easier collaboration, bug tracking and reverting 1. Delete merged branch diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md index c996b4cea8f..e4b3b12ec43 100644 --- a/doc/sphinx-guides/source/qa/test-automation.md +++ b/doc/sphinx-guides/source/qa/test-automation.md @@ -4,7 +4,7 @@ :depth: 3 ``` -## Introduction +## Jenkins Jenkins is our primary tool for knowing if our API tests are passing. (Unit tests are executed locally by developers.) @@ -12,28 +12,27 @@ You can find our Jenkins installation at . Please note that while it has been open to the public in the past, it is currently firewalled off. We can poke a hole in the firewall for your IP address if necessary. Please get in touch. (You might also be interested in which is about restoring the ability of contributors to see if their pull requests are passing API tests or not.) -## Jobs +### Jenkins Jobs Jenkins is organized into jobs. We'll highlight a few. -### IQSS-dataverse-develop +#### IQSS-dataverse-develop -, which we will refer to as the "develop" job runs after pull requests are merged. It is crucial that this job stays green (passing) because we always want to stay in a "release ready" state. If you notice that this job is failing, make noise about it! +, which we will refer to as the "develop" job, runs after pull requests are merged. It is crucial that this job stays green (passing) because we always want to stay in a "release ready" state. If you notice that this job is failing, make noise about it! -You can get to this job from the README at . +You can access this job from the README at . 
-### IQSS-Dataverse-Develop-PR +#### IQSS-Dataverse-Develop-PR can be thought of as "PR jobs". It's a collection of jobs run on pull requests. Typically, you will navigate directly into the job (and it's particular build number) from a pull request. For example, from , look for a check called "continuous-integration/jenkins/pr-merge". Clicking it will bring you to a particular build like (build #10). -### guides.dataverse.org +#### guides.dataverse.org - is what we use to build guides. See {doc}`/developers/making-releases` in the Developer Guide. + is what we use to build guides. See {ref}`build-guides` in the Developer Guide for how this job is used at release time. -### Building and Deploying a Pull Request from Jenkins to Dataverse-Internal +#### Building and Deploying a Pull Request from Jenkins to Dataverse-Internal - -1. Log on to GitHub, go to projects, dataverse to see Kanban board, select a pull request to test from the QA queue. +1. Go to the QA column on our [project board](https://github.com/orgs/IQSS/projects/34), and select a pull request to test. 1. From the pull request page, click the copy icon next to the pull request branch name. @@ -50,15 +49,13 @@ You can get to this job from the README at . 1. Once complete, go to and check that the deployment succeeded, and that the homepage displays the latest build number. -1. If for some reason it didn’t deploy, check the server.log file. It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (`su - dataverse` then `/usr/local/payara5/bin/asadmin list-applications; /usr/local/payara5/bin/asadmin undeploy dataverse-5.11.1; /usr/local/payara5/bin/asadmin deploy /tmp/dataverse-5.11.1.war`) +1. If for some reason it didn't deploy, check the server.log file. It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (`su - dataverse` then `/usr/local/payara6/bin/asadmin list-applications; /usr/local/payara6/bin/asadmin undeploy dataverse-6.1; /usr/local/payara6/bin/asadmin deploy /tmp/dataverse-6.1.war`) -1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. +1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. In the case of a collision, ask the developer to rename the script. 1. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to `tail -F server.log` in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. - - -## Checking if API Tests are Passing +### Checking if API Tests are Passing on Jenkins If API tests are failing, you should not merge the pull request. @@ -70,7 +67,7 @@ How can you know if API tests are passing? Here are the steps, by way of example - Under "All Tests", look at the duration for "edu.harvard.iq.dataverse.api". It should be ten minutes or higher. If it was only a few seconds, tests did not run. - Assuming tests ran, if there were failures, they should appear at the top under "All Failed Tests". Inform the author of the pull request about the error. -## Diagnosing Failures +### Diagnosing Failures on Jenkins API test failures can have multiple causes. 
As described above, from the "Test Result" page, you might see the failure under "All Failed Tests". However, the test could have failed because of some underlying system issue. @@ -84,3 +81,7 @@ fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "el ``` In the example above, if Payara can't be downloaded, we're obviously going to have problems deploying Dataverse to it! + +## GitHub Actions + +We also use GitHub Actions. See for a list of actions. diff --git a/doc/sphinx-guides/source/qa/testing-approach.md b/doc/sphinx-guides/source/qa/testing-approach.md index 2c7241999a8..817161d02a0 100644 --- a/doc/sphinx-guides/source/qa/testing-approach.md +++ b/doc/sphinx-guides/source/qa/testing-approach.md @@ -8,25 +8,25 @@ We use a risk-based, manual testing approach to achieve the most benefit with limited resources. This means we want to catch bugs where they are likely to exist, ensure core functions work, and failures do not have catastrophic results. In practice this means we do a brief positive check of core functions on each build called a smoke test, we test the most likely place for new bugs to exist, the area where things have changed, and attempt to prevent catastrophic failure by asking about the scope and reach of the code and how failures may occur. -If it seems possible through user error or some other occurrence that such a serious failure will occur, we try to make it happen in the test environment. If the code has a UI component, we also do a limited amount of browser compatibility testing using Chrome, Firefox, and Safari browsers. We do not currently do UX or accessibility testing on a regular basis, though both have been done product-wide by the Design group and by the community. +If it seems possible through user error or some other occurrence that such a serious failure will occur, we try to make it happen in the test environment. If the code has a UI component, we also do a limited amount of browser compatibility testing using Chrome, Firefox, and Safari browsers. We do not currently do UX or accessibility testing on a regular basis, though both have been done product-wide by a Design group (in the past) and by the community. ## Examining a Pull Request for Test Cases ### What Problem Does It Solve? -Read the top part of the pull request for a description, notes for reviewers, and usually a "how to test" section. Does it make sense? If not, read the underlying issue it closes, and any release notes or documentation. Knowing in general what it does helps you to think about how to approach it. +Read the top part of the pull request for a description, notes for reviewers, and usually a "how to test" section. Does it make sense? If not, read the underlying issue it closes and any release notes or documentation. Knowing in general what it does helps you to think about how to approach it. ### How is It Configured? -Most pull requests do not have any special configuration and are enabled on deployment, but some do. Configuration is part of testing. A sysadmin or superuser will need to follow these instructions so try them out. Plus, that is the only way you will get it working to test it! +Most pull requests do not have any special configuration and are enabled on deployment, but some do. Configuration is part of testing. A sysadmin or superuser will need to follow these instructions so make sure they are in the release note snippet and try them out. Plus, that is the only way you will get it working to test it! 
-Identify test cases by examining the problem report or feature description and any documentation of functionality. Look for statements or assertions about functions, what it does, as well as conditions or conditional behavior. These become your test cases. Think about how someone might make a mistake using it and try it. Does it fail gracefully or in a confusing or worse, damaging manner? Also, consider whether this pull request may interact with other functionality and try some spot checks there. For instance, if new metadata fields are added, try the export feature. Of course, try the suggestions under "how to test." Those may be sufficient, but you should always think about the pull request based on what it does. +Identify test cases by examining the problem report or feature description and any documentation of functionality. Look for statements or assertions about functions, what it does, as well as conditions or conditional behavior. These become your test cases. Think about how someone might make a mistake using it and try it. Does it fail gracefully or in a confusing, or worse, damaging manner? Also, consider whether this pull request may interact with other functionality and try some spot checks there. For instance, if new metadata fields have been added, try the export feature. Of course, try the suggestions under "how to test." Those may be sufficient, but you should always think about the pull request based on what it does. -Try adding, modifying, and deleting any objects involved. This is probably covered by using the feature but a good basic approach to keep in mind. +Try adding, modifying, and deleting any objects involved. This is probably covered by using the feature, but this is a good basic approach to keep in mind. Make sure any server logging is appropriate. You should tail the server log while running your tests. Watch for unreported errors or stack traces especially chatty logging. If you do find a bug you will need to report the stack trace from the server.log. Err on the side of providing the developer too much of server.log rather than too little. -Exercise the UI if there is one. We tend to use Chrome for most of my basic testing as it's used twice as much as the next most commonly used browser, according to our site's Google Analytics. First go through all the options in the UI. Then, if all works, spot-check using Firefox and Safari. +Exercise the UI if there is one. We tend to use Chrome for most of our basic testing as it's used twice as much as the next most commonly-used browser, according to our site's Google Analytics. First go through all the options in the UI. Then, if all works, spot-check using Firefox and Safari. Check permissions. Is this feature limited to a specific set of users? Can it be accessed by a guest or by a non-privileged user? How about pasting a privileged page URL into a non-privileged user’s browser? @@ -47,4 +47,4 @@ Think about risk. Is the feature or function part of a critical area such as per This workflow is fine for a single person testing a PR, one at a time. It would be awkward or impossible if there were multiple people wanting to test different PRs at the same time. If a developer is testing, they would likely just deploy to their dev environment. That might be ok, but is the env is fully configured enough to offer a real-world testing scenario? -An alternative might be to spin an EC2 branch on AWS, potentially using sample data. 
This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. \ No newline at end of file +An alternative might be to spin an EC2 branch on AWS, potentially using sample data. This can take some time so another option might be to spin up a few, persistent AWS instances with sample data this way, one per tester, and just deploy new builds there when you want to test. You could even configure Jenkins projects for each if desired to maintain consistency in how they’re built. diff --git a/doc/sphinx-guides/source/qa/testing-infrastructure.md b/doc/sphinx-guides/source/qa/testing-infrastructure.md index 7a4bda626fc..c099076c458 100644 --- a/doc/sphinx-guides/source/qa/testing-infrastructure.md +++ b/doc/sphinx-guides/source/qa/testing-infrastructure.md @@ -7,11 +7,11 @@ ## Dataverse Internal -To build and test a PR, we use a build named `IQSS_Dataverse_Internal` on , which deploys the .war file to an AWS instance named . +To build and test a PR, we use a job called `IQSS_Dataverse_Internal` on (see {doc}`test-automation`), which deploys the .war file to an AWS instance named . ## Guides Server -There is also a guides build project named `guides.dataverse.org`. Any test builds of guides are deployed to a named directory on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. +There is also a guides job called `guides.dataverse.org` (see {doc}`test-automation`). Any test builds of guides are deployed to a named directory on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. Note that changes to guides can also be previewed on Read the Docs. In the pull request, look for a link like . This Read the Docs preview is also mentioned under also {doc}`/developers/documentation`. From d06ded15c9da2024f75250bcc8a25c363ae1cdc9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 14:51:57 -0500 Subject: [PATCH 0574/1112] move "deploy to internal" out of "test automation" #10101 --- doc/sphinx-guides/source/qa/qa-workflow.md | 2 +- .../source/qa/test-automation.md | 25 ------------------ .../source/qa/testing-infrastructure.md | 26 +++++++++++++++++++ 3 files changed, 27 insertions(+), 26 deletions(-) diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index 3db17ecb8a4..4654a7456d2 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -41,7 +41,7 @@ 1. Build and deploy the pull request. - Normally this is done using Jenkins and automatically deployed to the QA test machine. + Normally this is done using Jenkins and automatically deployed to the QA test machine. See {ref}`deploy-to-internal`. 1. Configure if required diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md index e4b3b12ec43..708d0f88e23 100644 --- a/doc/sphinx-guides/source/qa/test-automation.md +++ b/doc/sphinx-guides/source/qa/test-automation.md @@ -30,31 +30,6 @@ You can access this job from the README at . 
is what we use to build guides. See {ref}`build-guides` in the Developer Guide for how this job is used at release time. -#### Building and Deploying a Pull Request from Jenkins to Dataverse-Internal - -1. Go to the QA column on our [project board](https://github.com/orgs/IQSS/projects/34), and select a pull request to test. - -1. From the pull request page, click the copy icon next to the pull request branch name. - -1. Log on to , select the `IQSS_Dataverse_Internal` project, and configure the repository URL and branch specifier to match the ones from the pull request. For example: - - * 8372-gdcc-xoai-library has IQSS implied - - **Repository URL:** https://github.com/IQSS/dataverse.git - - **Branch specifier:** */8372-gdcc-xoai-library - * GlobalDataverseCommunityConsortium:GDCC/DC-3B - - **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git - - **Branch specifier:** */GDCC/DC-3B. - -1. Click "Build Now" and note the build number in progress. - -1. Once complete, go to and check that the deployment succeeded, and that the homepage displays the latest build number. - -1. If for some reason it didn't deploy, check the server.log file. It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (`su - dataverse` then `/usr/local/payara6/bin/asadmin list-applications; /usr/local/payara6/bin/asadmin undeploy dataverse-6.1; /usr/local/payara6/bin/asadmin deploy /tmp/dataverse-6.1.war`) - -1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. In the case of a collision, ask the developer to rename the script. - -1. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to `tail -F server.log` in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. - ### Checking if API Tests are Passing on Jenkins If API tests are failing, you should not merge the pull request. diff --git a/doc/sphinx-guides/source/qa/testing-infrastructure.md b/doc/sphinx-guides/source/qa/testing-infrastructure.md index c099076c458..804e4c0afe6 100644 --- a/doc/sphinx-guides/source/qa/testing-infrastructure.md +++ b/doc/sphinx-guides/source/qa/testing-infrastructure.md @@ -9,6 +9,32 @@ To build and test a PR, we use a job called `IQSS_Dataverse_Internal` on (see {doc}`test-automation`), which deploys the .war file to an AWS instance named . +(deploy-to-internal)= +### Building and Deploying a Pull Request from Jenkins to Dataverse-Internal + +1. Go to the QA column on our [project board](https://github.com/orgs/IQSS/projects/34), and select a pull request to test. + +1. From the pull request page, click the copy icon next to the pull request branch name. + +1. Log on to , select the `IQSS_Dataverse_Internal` project, and configure the repository URL and branch specifier to match the ones from the pull request. For example: + + * 8372-gdcc-xoai-library has IQSS implied + - **Repository URL:** https://github.com/IQSS/dataverse.git + - **Branch specifier:** */8372-gdcc-xoai-library + * GlobalDataverseCommunityConsortium:GDCC/DC-3B + - **Repository URL:** https://github.com/GlobalDataverseCommunityConsortium/dataverse.git + - **Branch specifier:** */GDCC/DC-3B. + +1. Click "Build Now" and note the build number in progress. + +1. 
Once complete, go to and check that the deployment succeeded, and that the homepage displays the latest build number. + +1. If for some reason it didn't deploy, check the server.log file. It may just be a caching issue so try un-deploying, deleting cache, restarting, and re-deploying on the server (`su - dataverse` then `/usr/local/payara6/bin/asadmin list-applications; /usr/local/payara6/bin/asadmin undeploy dataverse-6.1; /usr/local/payara6/bin/asadmin deploy /tmp/dataverse-6.1.war`) + +1. If that didn't work, you may have run into a Flyway DB script collision error but that should be indicated by the server.log. See {doc}`/developers/sql-upgrade-scripts` in the Developer Guide. In the case of a collision, ask the developer to rename the script. + +1. Assuming the above steps worked, and they should 99% of the time, test away! Note: be sure to `tail -F server.log` in a terminal window while you are doing any testing. This way you can spot problems that may not appear in the UI and have easier access to any stack traces for easier reporting. + ## Guides Server There is also a guides job called `guides.dataverse.org` (see {doc}`test-automation`). Any test builds of guides are deployed to a named directory on guides.dataverse.org and can be found and tested by going to the existing guides, removing the part of the URL that contains the version, and browsing the resulting directory listing for the latest change. From 5ffc0589c75fe2fcf2584050ae5a477ddce27e06 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 15:06:42 -0500 Subject: [PATCH 0575/1112] move testing approaches just below overview #10101 --- doc/sphinx-guides/source/qa/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md index 937b352bccb..f16cd1d38fc 100644 --- a/doc/sphinx-guides/source/qa/index.md +++ b/doc/sphinx-guides/source/qa/index.md @@ -2,9 +2,9 @@ ```{toctree} overview.md +testing-approach.md testing-infrastructure.md qa-workflow.md -testing-approach.md test-automation.md performance-tests.md ``` From 61abe519a429be60616cd61a56df4ad4f4aa52dd Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 15:12:01 -0500 Subject: [PATCH 0576/1112] minor edits #10101 --- doc/sphinx-guides/source/qa/overview.md | 2 ++ doc/sphinx-guides/source/qa/qa-workflow.md | 1 + 2 files changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index f8eb7b19297..64796357831 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -17,6 +17,8 @@ Before a pull request is moved to QA, it must be reviewed by a member of the dev Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions made in that documentation are tested. Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop branch, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Merged column (where it might be discussed at the next standup) and the process repeats with the next PR until it is decided to {doc}`make a release `. 
+The complete suggested workflow can be found at {doc}`qa-workflow`. + ## Tips and Tricks - Start testing simply, with the most obvious test. You don’t need to know all your tests upfront. As you gain comfort and understanding of how it works, try more tests until you are done. If it is a complex feature, jot down your tests in an outline format, some beforehand as a guide, and some after as things occur to you. Save the doc in a testing folder (on Google Drive). This potentially will help with future testing. diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index 4654a7456d2..9915fe97d98 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -4,6 +4,7 @@ :local: :depth: 3 ``` +## Checklist 1. Assign the PR you are working on to yourself. From cad9e583732a568ff083999aba16941505a207f4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 Jan 2024 15:20:17 -0500 Subject: [PATCH 0577/1112] add release note #10101 --- doc/release-notes/10101-qa-guide.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10101-qa-guide.md diff --git a/doc/release-notes/10101-qa-guide.md b/doc/release-notes/10101-qa-guide.md new file mode 100644 index 00000000000..11fbd7df2c4 --- /dev/null +++ b/doc/release-notes/10101-qa-guide.md @@ -0,0 +1 @@ +A new QA Guide is intended mostly for the core development team but may be of interest to contributors. From 93de747a3f7c31cbbacd9e4d3895c00f44c0b522 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 24 Jan 2024 16:49:31 -0500 Subject: [PATCH 0578/1112] Updating flyway name --- ...straints.sql => V6.1.0.4__9983-missing-unique-constraints.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.2__9983-missing-unique-constraints.sql => V6.1.0.4__9983-missing-unique-constraints.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.2__9983-missing-unique-constraints.sql b/src/main/resources/db/migration/V6.1.0.4__9983-missing-unique-constraints.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.2__9983-missing-unique-constraints.sql rename to src/main/resources/db/migration/V6.1.0.4__9983-missing-unique-constraints.sql From 743dbbc6655fd9e8bcab9db7b9df71a2fa4758db Mon Sep 17 00:00:00 2001 From: beep Date: Thu, 25 Jan 2024 08:37:24 +0100 Subject: [PATCH 0579/1112] Update docker-compose-dev.yml Co-authored-by: Philip Durbin --- docker-compose-dev.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 76a4c8a745d..6eab84092ed 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -60,8 +60,8 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets - # Map the glassfish applications folder so that we can update webapp resources using scripts/intellij/cpwebapp.sh - - ./docker-dev-volumes/glassfish/applications:/opt/payara/appserver/glassfish/domains/domain1/applications + # Uncomment to map the glassfish applications folder so that we can update webapp resources using scripts/intellij/cpwebapp.sh + # - ./docker-dev-volumes/glassfish/applications:/opt/payara/appserver/glassfish/domains/domain1/applications # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). # Replace 6.0 with the current version. 
# - ./target/dataverse-6.0:/opt/payara/deployments/dataverse From 9d124e760bba83b7baa46bb1f88ec453a6bf6e6a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 25 Jan 2024 11:51:12 +0000 Subject: [PATCH 0580/1112] Refactor: GetLatestPublishedDatasetVersionCommand --- ...tLatestPublishedDatasetVersionCommand.java | 50 +++++++++---------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index dd9a8112afe..9ba02ef750b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -17,7 +17,7 @@ public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; - private boolean checkPermsWhenDeaccessioned; + private final boolean checkPermsWhenDeaccessioned; public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { this(aRequest, anAffectedDataset, false, false); @@ -31,37 +31,35 @@ public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Datase } /* - * This command depending on the requested parameters will return: - * - * If the user requested to include a deaccessioned dataset with the files, the command will return the deaccessioned version if the user has permissions to view the files. Otherwise, it will return null. - * If the user requested to include a deaccessioned dataset but did not request the files, the command will return the deaccessioned version. - * If the user did not request to include a deaccessioned dataset, the command will return the latest published version. - * - */ + * This command depending on the requested parameters will return: + * + * If the user requested to include a deaccessioned dataset with the files, the command will return the deaccessioned version if the user has permissions to view the files. Otherwise, it will return null. + * If the user requested to include a deaccessioned dataset but did not request the files, the command will return the deaccessioned version. + * If the user did not request to include a deaccessioned dataset, the command will return the latest published version. + * + */ @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - - DatasetVersion dsv = null; - - //We search of a released or deaccessioned version if it is requested. - for (DatasetVersion next : ds.getVersions()) { - if (next.isReleased() || (includeDeaccessioned && next.isDeaccessioned())){ - dsv = next; - break; - } + DatasetVersion dsVersionResult = getReleaseOrDeaccessionedVersion(); + if (dsVersionResult != null && userHasPermissionsOnDatasetVersion(dsVersionResult, checkPermsWhenDeaccessioned, ctxt, ds)) { + return dsVersionResult; } + return null; + } - //Checking permissions if the deaccessionedVersion was found and we are checking permissions because files were requested. 
- if(dsv != null && (dsv.isDeaccessioned() && checkPermsWhenDeaccessioned)){ - //If the user has no permissions we return null - if(!ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset)){ - dsv = null; + private DatasetVersion getReleaseOrDeaccessionedVersion() { + for (DatasetVersion dsVersion : ds.getVersions()) { + if (dsVersion.isReleased() || (includeDeaccessioned && dsVersion.isDeaccessioned())) { + return dsVersion; } } - - return dsv; + return null; } - - + private boolean userHasPermissionsOnDatasetVersion(DatasetVersion dsVersionResult, boolean checkPermsWhenDeaccessioned, CommandContext ctxt, Dataset ds) { + if (dsVersionResult.isDeaccessioned() && checkPermsWhenDeaccessioned) { + return ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset); + } + return true; + } } From e59907bf76553701c8d7ff16428a9cea9f132d96 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 25 Jan 2024 11:55:13 +0000 Subject: [PATCH 0581/1112] Refactor: method name --- .../command/impl/GetLatestPublishedDatasetVersionCommand.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 9ba02ef750b..0afcbe2d0bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -40,14 +40,14 @@ public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Datase */ @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - DatasetVersion dsVersionResult = getReleaseOrDeaccessionedVersion(); + DatasetVersion dsVersionResult = getReleaseOrDeaccessionedDatasetVersion(); if (dsVersionResult != null && userHasPermissionsOnDatasetVersion(dsVersionResult, checkPermsWhenDeaccessioned, ctxt, ds)) { return dsVersionResult; } return null; } - private DatasetVersion getReleaseOrDeaccessionedVersion() { + private DatasetVersion getReleaseOrDeaccessionedDatasetVersion() { for (DatasetVersion dsVersion : ds.getVersions()) { if (dsVersion.isReleased() || (includeDeaccessioned && dsVersion.isDeaccessioned())) { return dsVersion; From 252672ab68a52cd9b9d8e84b80ddb3f23df769b3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 24 Jan 2024 14:44:52 -0500 Subject: [PATCH 0582/1112] Proposed fix in #10220 comments --- .../iq/dataverse/ThumbnailServiceWrapper.java | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index ae81a9326c4..7f56ce0cb27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -5,11 +5,14 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; - +import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; @@ 
-170,17 +173,30 @@ public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean if (thumbnailFile == null) { - // We attempt to auto-select via the optimized, native query-based method + boolean hasDatasetLogo = false; + StorageIO storageIO = null; + try { + storageIO = DataAccess.getStorageIO(dataset); + if (!storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) { + // If not, return null/use the default, otherwise pass the logo URL + hasDatasetLogo = true; + } + } catch (IOException ioex) { + logger.warning("getDatasetCardImageAsUrl(): Failed to initialize dataset StorageIO for " + + dataset.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); + } + // If no other logo we attempt to auto-select via the optimized, native + // query-based method // from the DatasetVersionService: - if (datasetVersionService.getThumbnailByVersionId(versionId) == null) { + if (!hasDatasetLogo && datasetVersionService.getThumbnailByVersionId(versionId) == null) { return null; } } - String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsUrl: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; + } // it's the responsibility of the user - to make sure the search result From 2c989923fba155ef0fe56f46489c3eec77abb213 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 24 Jan 2024 17:10:43 -0500 Subject: [PATCH 0583/1112] reverse logic --- .../java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 7f56ce0cb27..b6ab23848e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -177,7 +177,7 @@ public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean StorageIO storageIO = null; try { storageIO = DataAccess.getStorageIO(dataset); - if (!storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) { + if (storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) { // If not, return null/use the default, otherwise pass the logo URL hasDatasetLogo = true; } From 77ba2932551c4a1015745ef2f911fbb5ff7c730d Mon Sep 17 00:00:00 2001 From: landreev Date: Thu, 25 Jan 2024 11:23:19 -0500 Subject: [PATCH 0584/1112] Revert "9686 move harvesting client" --- .../9686-move-harvesting-client-id.md | 1 - .../edu/harvard/iq/dataverse/Dataset.java | 14 ++++- .../iq/dataverse/DatasetServiceBean.java | 48 +++++++++++++++++ .../edu/harvard/iq/dataverse/DvObject.java | 17 ------ .../iq/dataverse/DvObjectServiceBean.java | 48 ----------------- .../api/imports/ImportServiceBean.java | 5 -- .../client/HarvestingClientServiceBean.java | 4 +- .../dataverse/metrics/MetricsServiceBean.java | 52 +++++++++---------- .../search/SearchIncludeFragment.java | 41 ++++++--------- ...6.1.0.2__9686-move-harvestingclient-id.sql | 14 ----- .../harvard/iq/dataverse/api/DatasetsIT.java | 2 - .../harvard/iq/dataverse/api/MetricsIT.java | 17 +++--- 12 files changed, 112 insertions(+), 151 deletions(-) delete mode 100644 doc/release-notes/9686-move-harvesting-client-id.md delete mode 100644 src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql diff --git a/doc/release-notes/9686-move-harvesting-client-id.md b/doc/release-notes/9686-move-harvesting-client-id.md 
deleted file mode 100644 index 110fcc6ca6e..00000000000 --- a/doc/release-notes/9686-move-harvesting-client-id.md +++ /dev/null @@ -1 +0,0 @@ -With this release the harvesting client id will be available for harvested files. A database update will copy the id to previously harvested files./ diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index e2788e6acc6..a2f560bc959 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -752,9 +752,21 @@ public void setDatasetExternalCitations(List datasetEx this.datasetExternalCitations = datasetExternalCitations; } + @ManyToOne + @JoinColumn(name="harvestingClient_id") + private HarvestingClient harvestedFrom; - + public HarvestingClient getHarvestedFrom() { + return this.harvestedFrom; + } + public void setHarvestedFrom(HarvestingClient harvestingClientConfig) { + this.harvestedFrom = harvestingClientConfig; + } + + public boolean isHarvested() { + return this.harvestedFrom != null; + } private String harvestIdentifier; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 4c4aafdd1ec..c6df2a2e1ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -583,6 +583,54 @@ public Long getDatasetVersionCardImage(Long versionId, User user) { return null; } + /** + * Used to identify and properly display Harvested objects on the dataverse page. + * + * @param datasetIds + * @return + */ + public Map getArchiveDescriptionsForHarvestedDatasets(Set datasetIds){ + if (datasetIds == null || datasetIds.size() < 1) { + return null; + } + + String datasetIdStr = StringUtils.join(datasetIds, ", "); + + String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")"; + List searchResults; + + try { + searchResults = em.createNativeQuery(qstr).getResultList(); + } catch (Exception ex) { + searchResults = null; + } + + if (searchResults == null) { + return null; + } + + Map ret = new HashMap<>(); + + for (Object[] result : searchResults) { + Long dsId; + if (result[0] != null) { + try { + dsId = (Long)result[0]; + } catch (Exception ex) { + dsId = null; + } + if (dsId == null) { + continue; + } + + ret.put(dsId, (String)result[1]); + } + } + + return ret; + } + + public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User user) { if (datasetVersion == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 46955f52878..cc5d7620969 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.storageuse.StorageQuota; @@ -372,22 +371,6 @@ public GlobalId getGlobalId() { return globalId; } - @ManyToOne - @JoinColumn(name="harvestingClient_id") - private HarvestingClient harvestedFrom; - - public HarvestingClient getHarvestedFrom() { - return this.harvestedFrom; - } - - public void setHarvestedFrom(HarvestingClient 
harvestingClientConfig) { - this.harvestedFrom = harvestingClientConfig; - } - - public boolean isHarvested() { - return this.harvestedFrom != null; - } - public abstract T accept(Visitor v); @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java index 58a246b364a..d4219c36149 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java @@ -383,54 +383,6 @@ public Map getObjectPathsByIds(Set objectIds){ return ret; } - /** - * Used to identify and properly display Harvested objects on the dataverse page. - * - * @param dvObjectIds - * @return - */ - public Map getArchiveDescriptionsForHarvestedDvObjects(Set dvObjectIds){ - - if (dvObjectIds == null || dvObjectIds.size() < 1) { - return null; - } - - String dvObjectIsString = StringUtils.join(dvObjectIds, ", "); - String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, DvObject d WHERE d.harvestingClient_id = h.id AND d.id IN (" + dvObjectIsString + ")"; - List searchResults; - - try { - searchResults = em.createNativeQuery(qstr).getResultList(); - } catch (Exception ex) { - searchResults = null; - } - - if (searchResults == null) { - return null; - } - - Map ret = new HashMap<>(); - - for (Object[] result : searchResults) { - Long dvObjId; - if (result[0] != null) { - try { - Integer castResult = (Integer) result[0]; - dvObjId = Long.valueOf(castResult); - } catch (Exception ex) { - dvObjId = null; - } - if (dvObjId == null) { - continue; - } - ret.put(dvObjId, (String)result[1]); - } - } - - return ret; - } - - public String generateNewIdentifierByStoredProcedure() { StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure"); query.execute(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index c5812403f31..c17ba909230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -332,11 +332,6 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString()); - //adding the harvesting client id to harvested files #9686 - for (DataFile df : ds.getFiles()){ - df.setHarvestedFrom(harvestingClient); - } - if (existingDs != null) { // If this dataset already exists IN ANOTHER DATAVERSE // we are just going to skip it! 
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 5747c64d217..7ec6d75a41c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -199,8 +199,8 @@ public void recordHarvestJobStatus(Long hcId, Date finishTime, int harvestedCoun public Long getNumberOfHarvestedDatasetsByAllClients() { try { - return (Long) em.createNativeQuery("SELECT count(d.id) FROM dvobject d " - + " WHERE d.harvestingclient_id IS NOT NULL and d.dtype = 'Dataset'").getSingleResult(); + return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d " + + " WHERE d.harvestingclient_id IS NOT NULL").getSingleResult(); } catch (Exception ex) { logger.info("Warning: exception looking up the total number of harvested datasets: " + ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 9ae0c7cbb8f..1b5619c53e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -138,8 +138,8 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat + "from datasetversion\n" + "where versionstate='RELEASED' \n" + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n") - + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " : "") - + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NOT NULL\n " : "") + + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " : "") + + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NOT NULL\n " : "") + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? 
"" : ")\n") + "group by dataset_id) as subq group by subq.date order by date;" @@ -156,11 +156,11 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat * @param d */ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { - String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NULL)\n"; + String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NULL)\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated - String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NOT NULL)\n"; + String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NOT NULL)\n"; if (DATA_LOCATION_REMOTE.equals(dataLocation)) { dataLocationLine = harvestBaseLine; // replace } else if (DATA_LOCATION_ALL.equals(dataLocation)) { @@ -189,7 +189,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "join dvobject on dvobject.id = dataset.id\n" + + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + "where versionstate='RELEASED' \n" + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + "and \n" @@ -198,6 +198,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { +") sub_temp" ); logger.log(Level.FINE, "Metric query: {0}", query); + return (long) query.getSingleResult(); } @@ -206,17 +207,16 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio // A published local datasets may have more than one released version! 
// So that's why we have to jump through some extra hoops below // in order to select the latest one: - String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n" - + "(\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" - + " from datasetversion\n" - + " join dataset on dataset.id = datasetversion.dataset_id\n" - + " join dvobject on dataset.id = dvobject.id\n" - + " where versionstate='RELEASED'\n" - + " and dvobject.harvestingclient_id is null" - + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" - + " group by dataset_id\n" - + "))\n"; + String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n" + + "(\n" + + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + + " from datasetversion\n" + + " join dataset on dataset.id = datasetversion.dataset_id\n" + + " where versionstate='RELEASED'\n" + + " and dataset.harvestingclient_id is null\n" + + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + " group by dataset_id\n" + + "))\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated @@ -225,7 +225,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio // so the query is simpler: String harvestOriginClause = "(\n" + " datasetversion.dataset_id = dataset.id\n" + - " AND dvobject.harvestingclient_id IS NOT null \n" + + " AND dataset.harvestingclient_id IS NOT null \n" + " AND date_trunc('month', datasetversion.createtime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + ")\n"; @@ -244,7 +244,7 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio + "JOIN datasetfieldtype ON datasetfieldtype.id = controlledvocabularyvalue.datasetfieldtype_id\n" + "JOIN datasetversion ON datasetversion.id = datasetfield.datasetversion_id\n" + "JOIN dataset ON dataset.id = datasetversion.dataset_id\n" - + "JOIN dvobject ON dvobject.id = dataset.id\n" + + ((d == null) ? 
"" : "JOIN dvobject ON dvobject.id = dataset.id\n") + "WHERE\n" + originClause + "AND datasetfieldtype.name = 'subject'\n" @@ -258,11 +258,11 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio } public long datasetsPastDays(int days, String dataLocation, Dataverse d) { - String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NULL)\n"; + String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NULL)\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated - String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NOT NULL)\n"; + String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NOT NULL)\n"; if (DATA_LOCATION_REMOTE.equals(dataLocation)) { dataLocationLine = harvestBaseLine; // replace } else if (DATA_LOCATION_ALL.equals(dataLocation)) { @@ -276,7 +276,7 @@ public long datasetsPastDays(int days, String dataLocation, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "join dvobject on dvobject.id = dataset.id\n" + + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + "where versionstate='RELEASED' \n" + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and \n" @@ -304,7 +304,7 @@ public JsonArray filesTimeSeries(Dataverse d) { + "where datasetversion.id=filemetadata.datasetversion_id\n" + "and versionstate='RELEASED' \n" + "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n" - + "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " + + "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " + ((d == null) ? ")" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + "))\n ") + "group by filemetadata.id) as subq group by subq.date order by date;"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -327,11 +327,11 @@ public long filesToMonth(String yyyymm, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "join dvobject on dvobject.id = dataset.id\n" + + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + "where versionstate='RELEASED'\n" + ((d == null) ? 
"" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" - + "and dvobject.harvestingclient_id is null\n" + + "and dataset.harvestingclient_id is null\n" + "group by dataset_id \n" + ");" ); @@ -350,11 +350,11 @@ public long filesPastDays(int days, Dataverse d) { + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "join dvobject on dvobject.id = dataset.id\n" + + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") + "where versionstate='RELEASED'\n" + "and releasetime > current_date - interval '" + days + "' day\n" + ((d == null) ? "" : "AND dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") - + "and dvobject.harvestingclient_id is null\n" + + "and dataset.harvestingclient_id is null\n" + "group by dataset_id \n" + ");" ); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 939b39b94ef..5a5d8781726 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -1367,7 +1367,6 @@ public boolean canPublishDataset(Long datasetId){ public void setDisplayCardValues() { Set harvestedDatasetIds = null; - Set harvestedFileIds = null; for (SolrSearchResult result : searchResultsList) { //logger.info("checking DisplayImage for the search result " + i++); if (result.getType().equals("dataverses")) { @@ -1393,10 +1392,10 @@ public void setDisplayCardValues() { } else if (result.getType().equals("files")) { result.setImageUrl(thumbnailServiceWrapper.getFileCardImageAsBase64Url(result)); if (result.isHarvested()) { - if (harvestedFileIds == null) { - harvestedFileIds = new HashSet<>(); + if (harvestedDatasetIds == null) { + harvestedDatasetIds = new HashSet<>(); } - harvestedFileIds.add(result.getEntityId()); + harvestedDatasetIds.add(result.getParentIdAsLong()); } } } @@ -1408,35 +1407,25 @@ public void setDisplayCardValues() { // SQL query: if (harvestedDatasetIds != null) { - Map descriptionsForHarvestedDatasets = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedDatasetIds); - if (descriptionsForHarvestedDatasets != null && !descriptionsForHarvestedDatasets.isEmpty()) { + Map descriptionsForHarvestedDatasets = datasetService.getArchiveDescriptionsForHarvestedDatasets(harvestedDatasetIds); + if (descriptionsForHarvestedDatasets != null && descriptionsForHarvestedDatasets.size() > 0) { for (SolrSearchResult result : searchResultsList) { - if (result.isHarvested() && result.getType().equals("datasets") && descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) { - result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId())); + if (result.isHarvested()) { + if (result.getType().equals("files")) { + if (descriptionsForHarvestedDatasets.containsKey(result.getParentIdAsLong())) { + result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getParentIdAsLong())); + } + } else if (result.getType().equals("datasets")) { + if (descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) { + 
result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId())); + } + } } } } descriptionsForHarvestedDatasets = null; harvestedDatasetIds = null; } - - if (harvestedFileIds != null) { - - Map descriptionsForHarvestedFiles = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedFileIds); - if (descriptionsForHarvestedFiles != null && !descriptionsForHarvestedFiles.isEmpty()) { - for (SolrSearchResult result : searchResultsList) { - if (result.isHarvested() && result.getType().equals("files") && descriptionsForHarvestedFiles.containsKey(result.getEntityId())) { - - result.setHarvestingDescription(descriptionsForHarvestedFiles.get(result.getEntityId())); - - } - } - } - descriptionsForHarvestedFiles = null; - harvestedDatasetIds = null; - - } - // determine which of the objects are linked: diff --git a/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql deleted file mode 100644 index 67ba026745f..00000000000 --- a/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql +++ /dev/null @@ -1,14 +0,0 @@ -ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS harvestingclient_id BIGINT; - ---add harvesting client id to dvobject records of harvested datasets -update dvobject dvo set harvestingclient_id = s.harvestingclient_id from -(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s -where s.id = dvo.id; - ---add harvesting client id to dvobject records of harvested files -update dvobject dvo set harvestingclient_id = s.harvestingclient_id from -(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s -where s.id = dvo.owner_id; - -ALTER TABLE dataset drop COLUMN IF EXISTS harvestingclient_id; - diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 087db4858b2..9b51be4b365 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2548,8 +2548,6 @@ public void testLinkingDatasets() { EntityManager entityManager = entityManagerFactory.createEntityManager(); entityManager.getTransaction().begin(); // Do stuff... 
- //SEK 01/22/2024 - as of 6.2 harvestingclient_id will be on the dv object table - // so if this is ever implemented change will probably need to happen in the updatequery below entityManager.createNativeQuery("UPDATE dataset SET harvestingclient_id=1 WHERE id="+datasetId2).executeUpdate(); entityManager.getTransaction().commit(); entityManager.close(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java index 1425b7bc5d9..e3328eefb4a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java @@ -5,8 +5,6 @@ import edu.harvard.iq.dataverse.metrics.MetricsUtil; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.OK; -import java.time.LocalDate; -import java.time.format.DateTimeFormatter; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -18,13 +16,10 @@ //To improve these tests we should try adding data and see if the number DOESN'T //go up to show that the caching worked public class MetricsIT { - - private static String yyyymm; @BeforeAll public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); - yyyymm = LocalDate.now().format(DateTimeFormatter.ofPattern(MetricsUtil.YEAR_AND_MONTH_PATTERN)); UtilIT.clearMetricCache(); } @@ -35,7 +30,8 @@ public static void cleanUpClass() { @Test public void testGetDataversesToMonth() { - + String yyyymm = "2018-04"; +// yyyymm = null; Response response = UtilIT.metricsDataversesToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() @@ -58,7 +54,8 @@ public void testGetDataversesToMonth() { @Test public void testGetDatasetsToMonth() { - + String yyyymm = "2018-04"; +// yyyymm = null; Response response = UtilIT.metricsDatasetsToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() @@ -80,7 +77,8 @@ public void testGetDatasetsToMonth() { @Test public void testGetFilesToMonth() { - + String yyyymm = "2018-04"; +// yyyymm = null; Response response = UtilIT.metricsFilesToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() @@ -102,7 +100,8 @@ public void testGetFilesToMonth() { @Test public void testGetDownloadsToMonth() { - + String yyyymm = "2018-04"; +// yyyymm = null; Response response = UtilIT.metricsDownloadsToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() From 994cf18e5c91245404830ef7e03d682c68a43538 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 25 Jan 2024 16:34:16 -0500 Subject: [PATCH 0585/1112] add "running Dataverse in docker", other cleanup #10238 --- doc/sphinx-guides/source/container/index.rst | 20 ++------------ doc/sphinx-guides/source/container/intro.rst | 26 ++++++++++++++++++ .../source/container/running/backend-dev.rst | 7 +++++ .../source/container/running/demo.rst | 27 +++++++++++++++++++ .../source/container/running/frontend-dev.rst | 7 +++++ .../source/container/running/index.rst | 12 +++++++++ .../container/running/metadata-blocks.rst | 9 +++++++ .../source/container/running/production.rst | 11 ++++++++ docker/compose/demo/compose.yml | 0 9 files changed, 101 insertions(+), 18 deletions(-) create mode 100644 doc/sphinx-guides/source/container/intro.rst create mode 100644 doc/sphinx-guides/source/container/running/backend-dev.rst create mode 100644 
doc/sphinx-guides/source/container/running/demo.rst create mode 100644 doc/sphinx-guides/source/container/running/frontend-dev.rst create mode 100755 doc/sphinx-guides/source/container/running/index.rst create mode 100644 doc/sphinx-guides/source/container/running/metadata-blocks.rst create mode 100644 doc/sphinx-guides/source/container/running/production.rst create mode 100644 docker/compose/demo/compose.yml diff --git a/doc/sphinx-guides/source/container/index.rst b/doc/sphinx-guides/source/container/index.rst index 4bbc87a4845..abf871dd340 100644 --- a/doc/sphinx-guides/source/container/index.rst +++ b/doc/sphinx-guides/source/container/index.rst @@ -1,28 +1,12 @@ Container Guide =============== -Running the Dataverse software in containers is quite different than in a :doc:`standard installation <../installation/prep>`. - -Both approaches have pros and cons. These days, containers are very often used for development and testing, -but there is an ever rising move toward running applications in the cloud using container technology. - -**NOTE:** -**As the Institute for Quantitative Social Sciences (IQSS) at Harvard is running a standard, non-containerized installation, -container support described in this guide is mostly created and maintained by the Dataverse community on a best-effort -basis.** - -This guide is *not* about installation on technology like Docker Swarm, Kubernetes, Rancher or other -solutions to run containers in production. There is the `Dataverse on K8s project `_ for this -purpose, as mentioned in the :doc:`/developers/containers` section of the Developer Guide. - -This guide focuses on describing the container images managed from the main Dataverse repository (again: by the -community, not IQSS), their features and limitations. Instructions on how to build the images yourself and how to -develop and extend them further are provided. - **Contents:** .. toctree:: + intro + running/index dev-usage base-image app-image diff --git a/doc/sphinx-guides/source/container/intro.rst b/doc/sphinx-guides/source/container/intro.rst new file mode 100644 index 00000000000..94b2c99f0d1 --- /dev/null +++ b/doc/sphinx-guides/source/container/intro.rst @@ -0,0 +1,26 @@ +Introduction +============ + +Dataverse in containers! + +.. contents:: |toctitle| + :local: + +Intended Audience +----------------- + +This guide is intended for anyone who wants to run Dataverse in containers. This is potentially a wide audience, from sysadmins interested in running Dataverse in production in containers (not recommended yet) to contributors working on a bug fix (encouraged!). + +.. _getting-help-containers: + +Getting Help +------------ + +Please ask in #containers at https://chat.dataverse.org + +.. _helping-containers: + +Helping with the Containerization Effort +---------------------------------------- + +In 2023 the Containerization Working Group started meeting regularly. All are welcome to join! We talk in #containers at https://chat.dataverse.org and have a regular video call. For details, please visit https://ct.gdcc.io diff --git a/doc/sphinx-guides/source/container/running/backend-dev.rst b/doc/sphinx-guides/source/container/running/backend-dev.rst new file mode 100644 index 00000000000..45aa4450bfb --- /dev/null +++ b/doc/sphinx-guides/source/container/running/backend-dev.rst @@ -0,0 +1,7 @@ +Backend Development +=================== + +.. contents:: |toctitle| + :local: + +See :doc:`../dev-usage`. 
diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst new file mode 100644 index 00000000000..71e45f5028e --- /dev/null +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -0,0 +1,27 @@ +Demo or Evaluation +================== + +If you would like to demo or evaluate Dataverse running in containers, you're in the right place. + +.. contents:: |toctitle| + :local: + +Hardware Requirements +--------------------- + +- 8 GB RAM + +Software Requirements +--------------------- + +- Mac, Linux, or Windows (experimental) +- Docker + +Windows support is experimental but we are very interested in supporting Windows better. Please report bugs and see :ref:`helping-containers`. + +Steps +----- + +- Download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` +- Run ``docker compose up`` in the directory where you put ``compose.yml`` + diff --git a/doc/sphinx-guides/source/container/running/frontend-dev.rst b/doc/sphinx-guides/source/container/running/frontend-dev.rst new file mode 100644 index 00000000000..1f57d4531ba --- /dev/null +++ b/doc/sphinx-guides/source/container/running/frontend-dev.rst @@ -0,0 +1,7 @@ +Frontend Development +==================== + +.. contents:: |toctitle| + :local: + +https://github.com/IQSS/dataverse-frontend includes docs and scripts for running Dataverse in Docker for frontend development. diff --git a/doc/sphinx-guides/source/container/running/index.rst b/doc/sphinx-guides/source/container/running/index.rst new file mode 100755 index 00000000000..8d17b105eb4 --- /dev/null +++ b/doc/sphinx-guides/source/container/running/index.rst @@ -0,0 +1,12 @@ +Running Dataverse in Docker +=========================== + +Contents: + +.. toctree:: + + production + demo + metadata-blocks + frontend-dev + backend-dev diff --git a/doc/sphinx-guides/source/container/running/metadata-blocks.rst b/doc/sphinx-guides/source/container/running/metadata-blocks.rst new file mode 100644 index 00000000000..4794f29ab42 --- /dev/null +++ b/doc/sphinx-guides/source/container/running/metadata-blocks.rst @@ -0,0 +1,9 @@ +Editing Metadata Blocks +======================= + +.. contents:: |toctitle| + :local: + +The Admin Guide has a section on :doc:`/admin/metadatacustomization` and suggests running Dataverse in containers (Docker) for this purpose. + +This is certainly possible but the specifics have not yet been written. Until then, please see :doc:`demo`, which should also provide a suitable environment. diff --git a/doc/sphinx-guides/source/container/running/production.rst b/doc/sphinx-guides/source/container/running/production.rst new file mode 100644 index 00000000000..89e63ff5ab1 --- /dev/null +++ b/doc/sphinx-guides/source/container/running/production.rst @@ -0,0 +1,11 @@ +Production (Future) +=================== + +.. contents:: |toctitle| + :local: + +The images described in this guide not yet recommended for production usage. + +You can help the effort to support these images in production by trying them out and giving feedback (see :ref:`helping-containers`). + +For now, please follow :doc:`demo`. 
diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml new file mode 100644 index 00000000000..e69de29bb2d From fb58d895edac32744cae7b164d7ae9f1121dba94 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 10:58:07 -0500 Subject: [PATCH 0586/1112] tweaks and more use cases #10238 --- doc/sphinx-guides/source/container/intro.rst | 2 +- .../source/container/running/backend-dev.rst | 3 +++ .../source/container/running/demo.rst | 4 ++-- .../source/container/running/frontend-dev.rst | 5 ++++- .../source/container/running/github-action.rst | 18 ++++++++++++++++++ .../source/container/running/index.rst | 1 + .../container/running/metadata-blocks.rst | 8 +++++++- .../source/container/running/production.rst | 15 ++++++++++++--- 8 files changed, 48 insertions(+), 8 deletions(-) create mode 100644 doc/sphinx-guides/source/container/running/github-action.rst diff --git a/doc/sphinx-guides/source/container/intro.rst b/doc/sphinx-guides/source/container/intro.rst index 94b2c99f0d1..42b095f3158 100644 --- a/doc/sphinx-guides/source/container/intro.rst +++ b/doc/sphinx-guides/source/container/intro.rst @@ -9,7 +9,7 @@ Dataverse in containers! Intended Audience ----------------- -This guide is intended for anyone who wants to run Dataverse in containers. This is potentially a wide audience, from sysadmins interested in running Dataverse in production in containers (not recommended yet) to contributors working on a bug fix (encouraged!). +This guide is intended for anyone who wants to run Dataverse in containers. This is potentially a wide audience, from sysadmins interested in running Dataverse in production in containers (not recommended yet) to contributors working on a bug fix (encouraged!). See :doc:`running/index` for various scenarios and please let us know if your use case is not covered. .. _getting-help-containers: diff --git a/doc/sphinx-guides/source/container/running/backend-dev.rst b/doc/sphinx-guides/source/container/running/backend-dev.rst index 45aa4450bfb..8b2dab956ad 100644 --- a/doc/sphinx-guides/source/container/running/backend-dev.rst +++ b/doc/sphinx-guides/source/container/running/backend-dev.rst @@ -4,4 +4,7 @@ Backend Development .. contents:: |toctitle| :local: +Intro +----- + See :doc:`../dev-usage`. diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 71e45f5028e..8db8cfb2a9c 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -1,7 +1,7 @@ Demo or Evaluation ================== -If you would like to demo or evaluate Dataverse running in containers, you're in the right place. +If you would like to demo or evaluate Dataverse running in containers, you're in the right place. Your feedback is extremely valuable to us! To let us know what you think, pease see :ref:`helping-containers`. .. contents:: |toctitle| :local: @@ -17,7 +17,7 @@ Software Requirements - Mac, Linux, or Windows (experimental) - Docker -Windows support is experimental but we are very interested in supporting Windows better. Please report bugs and see :ref:`helping-containers`. +Windows support is experimental but we are very interested in supporting Windows better. Please report bugs (see :ref:`helping-containers`). 
Steps ----- diff --git a/doc/sphinx-guides/source/container/running/frontend-dev.rst b/doc/sphinx-guides/source/container/running/frontend-dev.rst index 1f57d4531ba..88d40c12053 100644 --- a/doc/sphinx-guides/source/container/running/frontend-dev.rst +++ b/doc/sphinx-guides/source/container/running/frontend-dev.rst @@ -4,4 +4,7 @@ Frontend Development .. contents:: |toctitle| :local: -https://github.com/IQSS/dataverse-frontend includes docs and scripts for running Dataverse in Docker for frontend development. +Intro +----- + +The frontend (web interface) of Dataverse is being decoupled from the backend. This evolving codebase has its own repo at https://github.com/IQSS/dataverse-frontend which includes docs and scripts for running the backend of Dataverse in Docker. diff --git a/doc/sphinx-guides/source/container/running/github-action.rst b/doc/sphinx-guides/source/container/running/github-action.rst new file mode 100644 index 00000000000..ae42dd494d1 --- /dev/null +++ b/doc/sphinx-guides/source/container/running/github-action.rst @@ -0,0 +1,18 @@ +GitHub Action +============= + +.. contents:: |toctitle| + :local: + +Intro +----- + +A GitHub Action is under development that will spin up a Dataverse instance within the context of GitHub CI workflows: https://github.com/gdcc/dataverse-action + +Use Cases +--------- + +Use cases for the GitHub Action include: + +- Testing :doc:`/api/client-libraries` that interact with Dataverse APIs +- Testing :doc:`/admin/integrations` of third party software with Dataverse diff --git a/doc/sphinx-guides/source/container/running/index.rst b/doc/sphinx-guides/source/container/running/index.rst index 8d17b105eb4..a02266f7cba 100755 --- a/doc/sphinx-guides/source/container/running/index.rst +++ b/doc/sphinx-guides/source/container/running/index.rst @@ -8,5 +8,6 @@ Contents: production demo metadata-blocks + github-action frontend-dev backend-dev diff --git a/doc/sphinx-guides/source/container/running/metadata-blocks.rst b/doc/sphinx-guides/source/container/running/metadata-blocks.rst index 4794f29ab42..fcc80ce1909 100644 --- a/doc/sphinx-guides/source/container/running/metadata-blocks.rst +++ b/doc/sphinx-guides/source/container/running/metadata-blocks.rst @@ -4,6 +4,12 @@ Editing Metadata Blocks .. contents:: |toctitle| :local: +Intro +----- + The Admin Guide has a section on :doc:`/admin/metadatacustomization` and suggests running Dataverse in containers (Docker) for this purpose. -This is certainly possible but the specifics have not yet been written. Until then, please see :doc:`demo`, which should also provide a suitable environment. +Status +------ + +For now, please see :doc:`demo`, which should also provide a suitable Dockerized Dataverse environment. diff --git a/doc/sphinx-guides/source/container/running/production.rst b/doc/sphinx-guides/source/container/running/production.rst index 89e63ff5ab1..0a628dc57b9 100644 --- a/doc/sphinx-guides/source/container/running/production.rst +++ b/doc/sphinx-guides/source/container/running/production.rst @@ -4,8 +4,17 @@ Production (Future) .. contents:: |toctitle| :local: -The images described in this guide not yet recommended for production usage. +Status +------ -You can help the effort to support these images in production by trying them out and giving feedback (see :ref:`helping-containers`). +The images described in this guide are not yet recommended for production usage. -For now, please follow :doc:`demo`. 
+How to Help +----------- + +You can help the effort to support these images in production by trying them out (see :doc:`demo`) and giving feedback (see :ref:`helping-containers`). + +Alternatives +------------ + +Until the images are ready for production, please use the traditional installation method described in the :doc:`/installation/index`. From b7ec6465b09e41929f985089c2a5c566e95308e4 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 26 Jan 2024 11:12:50 -0500 Subject: [PATCH 0587/1112] #9748 delete tools only added by tests --- .../iq/dataverse/api/ExternalToolsIT.java | 102 +++++++----------- 1 file changed, 39 insertions(+), 63 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 022747a3cdc..664c07d598c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -40,21 +40,6 @@ public void testGetExternalTools() { @Test public void testFileLevelTool1() { - // Delete all external tools before testing. - Response getTools = UtilIT.getExternalTools(); - getTools.prettyPrint(); - getTools.then().assertThat() - .statusCode(OK.getStatusCode()); - String body = getTools.getBody().asString(); - JsonReader bodyObject = Json.createReader(new StringReader(body)); - JsonArray tools = bodyObject.readObject().getJsonArray("data"); - for (int i = 0; i < tools.size(); i++) { - JsonObject tool = tools.getJsonObject(i); - int id = tool.getInt("id"); - Response deleteExternalTool = UtilIT.deleteExternalTool(id); - deleteExternalTool.prettyPrint(); - } - Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); createUser.then().assertThat() @@ -145,26 +130,14 @@ public void testFileLevelTool1() { .statusCode(OK.getStatusCode()) // No tools for this file type. .body("data", Matchers.hasSize(0)); + + //Delete the tool added by this test... + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); } @Test public void testDatasetLevelTool1() { - // Delete all external tools before testing. 
- Response getTools = UtilIT.getExternalTools(); - getTools.prettyPrint(); - getTools.then().assertThat() - .statusCode(OK.getStatusCode()); - String body = getTools.getBody().asString(); - JsonReader bodyObject = Json.createReader(new StringReader(body)); - JsonArray tools = bodyObject.readObject().getJsonArray("data"); - for (int i = 0; i < tools.size(); i++) { - JsonObject tool = tools.getJsonObject(i); - int id = tool.getInt("id"); - Response deleteExternalTool = UtilIT.deleteExternalTool(id); - deleteExternalTool.prettyPrint(); - } - Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); createUser.then().assertThat() @@ -184,7 +157,6 @@ public void testDatasetLevelTool1() { createDataset.then().assertThat() .statusCode(CREATED.getStatusCode()); -// Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); Integer datasetId = JsonPath.from(createDataset.getBody().asString()).getInt("data.id"); String datasetPid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId"); @@ -219,6 +191,8 @@ public void testDatasetLevelTool1() { addExternalTool.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("DatasetTool1")); + + long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); Response getExternalToolsByDatasetIdInvalidType = UtilIT.getExternalToolsForDataset(datasetId.toString(), "invalidType", apiToken); getExternalToolsByDatasetIdInvalidType.prettyPrint(); @@ -233,27 +207,16 @@ public void testDatasetLevelTool1() { .body("data[0].scope", CoreMatchers.equalTo("dataset")) .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("http://datasettool1.com?datasetPid=" + datasetPid + "&key=" + apiToken)) .statusCode(OK.getStatusCode()); - + + //Delete the tool added by this test... + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); } @Test public void testDatasetLevelToolConfigure() { - // Delete all external tools before testing. - Response getTools = UtilIT.getExternalTools(); - getTools.prettyPrint(); - getTools.then().assertThat() - .statusCode(OK.getStatusCode()); - String body = getTools.getBody().asString(); - JsonReader bodyObject = Json.createReader(new StringReader(body)); - JsonArray tools = bodyObject.readObject().getJsonArray("data"); - for (int i = 0; i < tools.size(); i++) { - JsonObject tool = tools.getJsonObject(i); - int id = tool.getInt("id"); - Response deleteExternalTool = UtilIT.deleteExternalTool(id); - deleteExternalTool.prettyPrint(); - } - Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); createUser.then().assertThat() @@ -302,6 +265,8 @@ public void testDatasetLevelToolConfigure() { addExternalTool.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")); + + long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "configure", apiToken); getExternalToolsByDatasetId.prettyPrint(); @@ -311,6 +276,11 @@ public void testDatasetLevelToolConfigure() { .body("data[0].types[0]", CoreMatchers.equalTo("configure")) .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid)) .statusCode(OK.getStatusCode()); + + //Delete the tool added by this test... 
+ Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); } @@ -400,12 +370,13 @@ public void deleteTools() { String body = getTools.getBody().asString(); JsonReader bodyObject = Json.createReader(new StringReader(body)); JsonArray tools = bodyObject.readObject().getJsonArray("data"); + /* for (int i = 0; i < tools.size(); i++) { JsonObject tool = tools.getJsonObject(i); int id = tool.getInt("id"); Response deleteExternalTool = UtilIT.deleteExternalTool(id); deleteExternalTool.prettyPrint(); - } + }*/ } // preview only @@ -446,6 +417,13 @@ public void createToolShellScript() { addExternalTool.prettyPrint(); addExternalTool.then().assertThat() .statusCode(OK.getStatusCode()); + + long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + + //Delete the tool added by this test... + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); } // explore only @@ -479,6 +457,13 @@ public void createToolDataExplorer() { addExternalTool.prettyPrint(); addExternalTool.then().assertThat() .statusCode(OK.getStatusCode()); + + long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + + //Delete the tool added by this test... + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); } // both preview and explore @@ -527,21 +512,6 @@ public void createToolSpreadsheetViewer() { @Test public void testFileLevelToolWithAuxFileReq() throws IOException { - // Delete all external tools before testing. - Response getTools = UtilIT.getExternalTools(); - getTools.prettyPrint(); - getTools.then().assertThat() - .statusCode(OK.getStatusCode()); - String body = getTools.getBody().asString(); - JsonReader bodyObject = Json.createReader(new StringReader(body)); - JsonArray tools = bodyObject.readObject().getJsonArray("data"); - for (int i = 0; i < tools.size(); i++) { - JsonObject tool = tools.getJsonObject(i); - int id = tool.getInt("id"); - Response deleteExternalTool = UtilIT.deleteExternalTool(id); - deleteExternalTool.prettyPrint(); - } - Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); createUser.then().assertThat() @@ -640,6 +610,12 @@ public void testFileLevelToolWithAuxFileReq() throws IOException { .body("data[0].displayName", CoreMatchers.equalTo("HDF5 Tool")) .body("data[0].scope", CoreMatchers.equalTo("file")) .body("data[0].contentType", CoreMatchers.equalTo("application/x-hdf5")); + + //Delete the tool added by this test... + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); + } } From cc29efecd2748ad005760610c6be65ba073b35c6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 11:30:19 -0500 Subject: [PATCH 0588/1112] stub out demo/eval compose.yml based on dev compose #10238 Differences from dev version: - localstack and minio removed - env vars filled in based on current .env The goal is to have a single file to download, rather than a compose file and an .env file. 
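
As a rough sketch of the intended workflow (the exact download location is
not decided here; the :download: link in the demo guide remains the
authoritative pointer), a user should only need the one file plus Docker:

    mkdir dataverse-demo && cd dataverse-demo
    # save the downloaded compose.yml into this directory
    docker compose up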
--- docker/compose/demo/compose.yml | 170 ++++++++++++++++++++++++++++++++ 1 file changed, 170 insertions(+) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index e69de29bb2d..aea99040acd 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -0,0 +1,170 @@ +version: "2.4" + +services: + + dev_dataverse: + container_name: "dev_dataverse" + hostname: dataverse + image: gdcc/dataverse:unstable + restart: on-failure + user: payara + environment: + DATAVERSE_DB_HOST: postgres + DATAVERSE_DB_PASSWORD: secret + DATAVERSE_DB_USER: dataverse + ENABLE_JDWP: "1" + DATAVERSE_FEATURE_API_BEARER_AUTH: "1" + DATAVERSE_AUTH_OIDC_ENABLED: "1" + DATAVERSE_AUTH_OIDC_CLIENT_ID: test + DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 + DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test + DATAVERSE_JSF_REFRESH_PERIOD: "1" + # These two oai settings are here to get HarvestingServerIT to pass + dataverse_oai_server_maxidentifiers: "2" + dataverse_oai_server_maxrecords: "2" + JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 + -Ddataverse.files.file1.type=file + -Ddataverse.files.file1.label=Filesystem + -Ddataverse.files.file1.directory=${STORAGE_DIR}/store + ports: + - "8080:8080" # HTTP (Dataverse Application) + - "4848:4848" # HTTP (Payara Admin Console) + - "9009:9009" # JDWP + - "8686:8686" # JMX + networks: + - dataverse + depends_on: + - dev_postgres + - dev_solr + - dev_dv_initializer + volumes: + - ./docker-dev-volumes/app/data:/dv + - ./docker-dev-volumes/app/secrets:/secrets + # Uncomment to map the glassfish applications folder so that we can update webapp resources using scripts/intellij/cpwebapp.sh + # - ./docker-dev-volumes/glassfish/applications:/opt/payara/appserver/glassfish/domains/domain1/applications + # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). + # Replace 6.0 with the current version. 
+ # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse + tmpfs: + - /dumps:mode=770,size=2052M,uid=1000,gid=1000 + - /tmp:mode=770,size=2052M,uid=1000,gid=1000 + mem_limit: 2147483648 # 2 GiB + mem_reservation: 1024m + privileged: false + + dev_bootstrap: + container_name: "dev_bootstrap" + image: gdcc/configbaker:unstable + restart: "no" + command: + - bootstrap.sh + - dev + networks: + - dataverse + + dev_dv_initializer: + container_name: "dev_dv_initializer" + image: gdcc/configbaker:unstable + restart: "no" + command: + - sh + - -c + - "fix-fs-perms.sh dv" + volumes: + - ./docker-dev-volumes/app/data:/dv + + dev_postgres: + container_name: "dev_postgres" + hostname: postgres + image: postgres:13 + restart: on-failure + environment: + - POSTGRES_USER=dataverse + - POSTGRES_PASSWORD=secret + ports: + - "5432:5432" + networks: + - dataverse + volumes: + - ./docker-dev-volumes/postgresql/data:/var/lib/postgresql/data + + dev_solr_initializer: + container_name: "dev_solr_initializer" + image: gdcc/configbaker:unstable + restart: "no" + command: + - sh + - -c + - "fix-fs-perms.sh solr && cp -a /template/* /solr-template" + volumes: + - ./docker-dev-volumes/solr/data:/var/solr + - ./docker-dev-volumes/solr/conf:/solr-template + + dev_solr: + container_name: "dev_solr" + hostname: "solr" + image: solr:9.3.0 + depends_on: + - dev_solr_initializer + restart: on-failure + ports: + - "8983:8983" + networks: + - dataverse + command: + - "solr-precreate" + - "collection1" + - "/template" + volumes: + - ./docker-dev-volumes/solr/data:/var/solr + - ./docker-dev-volumes/solr/conf:/template + + dev_smtp: + container_name: "dev_smtp" + hostname: "smtp" + image: maildev/maildev:2.0.5 + restart: on-failure + ports: + - "25:25" # smtp server + - "1080:1080" # web ui + environment: + - MAILDEV_SMTP_PORT=25 + - MAILDEV_MAIL_DIRECTORY=/mail + networks: + - dataverse + #volumes: + # - ./docker-dev-volumes/smtp/data:/mail + tmpfs: + - /mail:mode=770,size=128M,uid=1000,gid=1000 + + dev_keycloak: + container_name: "dev_keycloak" + image: 'quay.io/keycloak/keycloak:21.0' + hostname: keycloak + environment: + - KEYCLOAK_ADMIN=kcadmin + - KEYCLOAK_ADMIN_PASSWORD=kcpassword + - KEYCLOAK_LOGLEVEL=DEBUG + - KC_HOSTNAME_STRICT=false + networks: + dataverse: + aliases: + - keycloak.mydomain.com #create a DNS alias within the network (add the same alias to your /etc/hosts to get a working OIDC flow) + command: start-dev --import-realm --http-port=8090 # change port to 8090, so within the network and external the same port is used + ports: + - "8090:8090" + volumes: + - './conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json' + + dev_nginx: + container_name: dev_nginx + image: gdcc/dev_nginx:unstable + ports: + - "4849:4849" + restart: always + networks: + - dataverse + +networks: + dataverse: + driver: bridge From 0c736cc698a3fef25fa8d5f25e76d4a85a6ec088 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 12:47:38 -0500 Subject: [PATCH 0589/1112] switch from unstable to alpha images #10238 --- docker/compose/demo/compose.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index aea99040acd..403143130ac 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -5,7 +5,7 @@ services: dev_dataverse: container_name: "dev_dataverse" hostname: dataverse - image: gdcc/dataverse:unstable + image: gdcc/dataverse:alpha restart: on-failure user: payara environment: @@ -54,7 
+54,7 @@ services: dev_bootstrap: container_name: "dev_bootstrap" - image: gdcc/configbaker:unstable + image: gdcc/configbaker:alpha restart: "no" command: - bootstrap.sh @@ -64,7 +64,7 @@ services: dev_dv_initializer: container_name: "dev_dv_initializer" - image: gdcc/configbaker:unstable + image: gdcc/configbaker:alpha restart: "no" command: - sh @@ -90,7 +90,7 @@ services: dev_solr_initializer: container_name: "dev_solr_initializer" - image: gdcc/configbaker:unstable + image: gdcc/configbaker:alpha restart: "no" command: - sh From 91287b35960afd0d351d1b07942333763ce84555 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 26 Jan 2024 15:55:12 -0500 Subject: [PATCH 0590/1112] #9748 one more assert --- src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 664c07d598c..6f0aa499dd1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -133,6 +133,8 @@ public void testFileLevelTool1() { //Delete the tool added by this test... Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); } @Test From 69d3bb9172ad134c32299a326ef76efda2420458 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 16:21:58 -0500 Subject: [PATCH 0591/1112] more content for demo/eval #10238 Also update tags section under "app image" (now live). --- .../source/container/app-image.rst | 18 +-- doc/sphinx-guides/source/container/intro.rst | 2 + .../source/container/running/demo.rst | 125 ++++++++++++++++-- 3 files changed, 126 insertions(+), 19 deletions(-) diff --git a/doc/sphinx-guides/source/container/app-image.rst b/doc/sphinx-guides/source/container/app-image.rst index 29f6d6ac1d4..caf4aadbf7e 100644 --- a/doc/sphinx-guides/source/container/app-image.rst +++ b/doc/sphinx-guides/source/container/app-image.rst @@ -22,20 +22,20 @@ IQSS will not offer you support how to deploy or run it, please reach out to the You might be interested in taking a look at :doc:`../developers/containers`, linking you to some (community-based) efforts. - +.. _supported-image-tags-app: Supported Image Tags ++++++++++++++++++++ This image is sourced from the main upstream code `repository of the Dataverse software `_. -Development and maintenance of the `image's code `_ happens there -(again, by the community). - -.. note:: - Please note that this image is not (yet) available from Docker Hub. You need to build local to use - (see below). Follow https://github.com/IQSS/dataverse/issues/9444 for new developments. - - +Development and maintenance of the `image's code `_ +happens there (again, by the community). Community-supported image tags are based on the two most important +upstream branches: + +- The ``unstable`` tag corresponds to the ``develop`` branch, where pull requests are merged. + (`Dockerfile `__) +- The ``alpha`` tag corresponds to the ``master`` branch, where releases are cut from. 
+ (`Dockerfile `__) Image Contents ++++++++++++++ diff --git a/doc/sphinx-guides/source/container/intro.rst b/doc/sphinx-guides/source/container/intro.rst index 42b095f3158..5099531dcc9 100644 --- a/doc/sphinx-guides/source/container/intro.rst +++ b/doc/sphinx-guides/source/container/intro.rst @@ -18,6 +18,8 @@ Getting Help Please ask in #containers at https://chat.dataverse.org +Alternatively, you can try one or more of the channels under :ref:`support`. + .. _helping-containers: Helping with the Containerization Effort diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 8db8cfb2a9c..0ad1e50442f 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -1,27 +1,132 @@ Demo or Evaluation ================== -If you would like to demo or evaluate Dataverse running in containers, you're in the right place. Your feedback is extremely valuable to us! To let us know what you think, pease see :ref:`helping-containers`. +If you would like to demo or evaluate Dataverse running in containers, you're in the right place. Your feedback is extremely valuable to us! To let us know what you think, please see :ref:`helping-containers`. .. contents:: |toctitle| :local: -Hardware Requirements ---------------------- +Quickstart +---------- -- 8 GB RAM +- Download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` +- Run ``docker compose up`` in the directory where you put ``compose.yml`` +- Visit http://localhost:8080 and try logging in: + + - username: dataverseAdmin + - password: admin1 -Software Requirements ---------------------- +Hardware and Software Requirements +----------------------------------- +- 8 GB RAM (if not much else is running) - Mac, Linux, or Windows (experimental) - Docker Windows support is experimental but we are very interested in supporting Windows better. Please report bugs (see :ref:`helping-containers`). -Steps ------ +Tags and Versions +----------------- -- Download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` -- Run ``docker compose up`` in the directory where you put ``compose.yml`` +The compose file references a tag called "alpha", which corresponds to the latest released version of Dataverse. This means that if a release of Dataverse comes out while you are demo'ing or evaluating, the version of Dataverse you are using could change. We are aware that there is a desire for tags that correspond to versions to ensure consistency. You are welcome to join `the discussion `_ and otherwise get in touch (see :ref:`helping-containers`). For more on tags, see :ref:`supported-image-tags-app`. + +Once Dataverse is running, you can check which version you have through the normal methods: + +- Check the bottom right in a web browser. +- Check http://localhost:8080/api/info/version via API. + +About the Containers +-------------------- + +If you run ``docker ps``, you'll see that multiple containers are spun up in a demo or evaluation. Here are the most important ones: + +- dataverse +- postgres +- solr +- smtp +- bootstrap + +Most are self-explanatory, and correspond to components listed under :doc:`/installation/prerequisites` in the (traditional) Installation Guide, but "bootstrap" refers to :doc:`../configbaker-image`. + +Additional containers are used in development (see :doc:`../dev-usage`), but for the purposes of a demo or evaluation, fewer moving (sometimes pointy) parts are included. 
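For orientation, the containers and the image tag discussed above can also be inspected from a shell. The following is only an illustrative sketch; it assumes the compose stack is already running on the local machine with port 8080 published:

    # list the locally pulled application image and its tags
    docker image ls gdcc/dataverse
    # show the running containers that make up the stack
    docker ps --format '{{.Names}}: {{.Image}}'
    # confirm the running Dataverse version via the API endpoint mentioned above
    curl http://localhost:8080/api/info/version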
+ +Security +-------- + +Please be aware that for now, the "dev" persona is used to bootstrap Dataverse, which means that admin APIs are wide open (to allow developers to test them; see :ref:`securing-your-installation` for more on API blocking), the "create user" key is set to a default value, etc. You can inspect the dev person `on GitHub `_ (look for ``--insecure``). + +We plan to ship a "demo" persona but it is not ready yet. See also :ref:`configbaker-personas`. + +Common Operations +----------------- + +Starting the Containers ++++++++++++++++++++++++ + +First, download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` and place it somewhere you'll remember. + +Then, run ``docker compose up`` in the directory where you put ``compose.yml`` + +Starting the containers for the first time involves a bootstrap process. You should see "have a nice day" output at the end. + +Stopping the Containers ++++++++++++++++++++++++ + +You might want to stop the containers if you aren't using them. Hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit the ``c`` key). + +You data is still intact and you can start the containers again with ``docker compose up``. + +Deleting the Containers ++++++++++++++++++++++++ + +If you no longer need the containers because your demo or evaluation is finished and you want to reclaim disk space, run ``docker compose down`` in the directory where you put ``compose.yml``. + +Deleting the Data Directory ++++++++++++++++++++++++++++ + +Data related to the Dataverse containers is placed in a directory called ``docker-dev-volumes`` next to the ``compose.yml`` file. If you are finished with your demo or evaluation or you want to start fresh, simply delete this directory. + +Configuration +------------- + +Configuration is described in greater detail under :doc:`/installation/config` in the Installation Guide, but there are some specifics to running in containers you should know about. + +.. _configbaker-personas: + +Personas +++++++++ + +When the containers are bootstrapped, the "dev" persona is used. In the future we plan to add a "demo" persona that is more suited to demo and evaluation use cases. + +Database Settings ++++++++++++++++++ + +Updating database settings is the same as described under :ref:`database-settings` in the Installation Guide. + +MPCONFIG Options +++++++++++++++++ + +The compose file contains an ``environment`` section with various MicroProfile Config (MPCONFIG) options. You can experiment with this by adding ``DATAVERSE_VERSION: foobar`` to change the (displayed) version of Dataverse to "foobar". + +JVM Options ++++++++++++ + +JVM options are not especially easy to change in the container. The general process is to get a shell on the "dataverse" container, change the settings, and then stop and start the containers. See :ref:`jvm-options` for more. + +Troubleshooting +--------------- + +Bootstrapping Did Not Complete +++++++++++++++++++++++++++++++ + +In the compose file, try increasing the timeout in the bootstrap container by adding something like this: + +.. code-block:: bash + + environment: + - TIMEOUT=10m + +Getting Help +------------ +Please do not be shy about reaching out for help. We very much want you to have a pleasant demo or evaluation experience. For ways to contact us, please see See :ref:`getting-help-containers`. 
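As a concrete companion to the database settings and MPCONFIG notes in the section above, the settings API can be exercised with curl. A minimal sketch, assuming the demo is reachable on localhost:8080 and the admin API is open (as it is under the "dev" persona); ``:SystemEmail`` and the address are placeholder examples only:

    # list the current database settings
    curl http://localhost:8080/api/admin/settings
    # update a single setting, e.g. the system email address
    curl -X PUT -d 'demo@example.org' http://localhost:8080/api/admin/settings/:SystemEmail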
From d3a378de0815a8d9af94fe8972f61d95841f89f2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 16:23:20 -0500 Subject: [PATCH 0592/1112] remove limits used for harvesting tests #10238 --- docker/compose/demo/compose.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 403143130ac..4cfd8cd9345 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -19,9 +19,6 @@ services: DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test DATAVERSE_JSF_REFRESH_PERIOD: "1" - # These two oai settings are here to get HarvestingServerIT to pass - dataverse_oai_server_maxidentifiers: "2" - dataverse_oai_server_maxrecords: "2" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Ddataverse.files.file1.type=file -Ddataverse.files.file1.label=Filesystem From 4555ae3f9dae12fd83c369b846c4aff114fecbf0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 16:25:59 -0500 Subject: [PATCH 0593/1112] remove keycloak container and OIDC config #10238 --- docker/compose/demo/compose.yml | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 4cfd8cd9345..e0839eb1023 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -14,10 +14,6 @@ services: DATAVERSE_DB_USER: dataverse ENABLE_JDWP: "1" DATAVERSE_FEATURE_API_BEARER_AUTH: "1" - DATAVERSE_AUTH_OIDC_ENABLED: "1" - DATAVERSE_AUTH_OIDC_CLIENT_ID: test - DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test DATAVERSE_JSF_REFRESH_PERIOD: "1" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Ddataverse.files.file1.type=file @@ -134,25 +130,6 @@ services: tmpfs: - /mail:mode=770,size=128M,uid=1000,gid=1000 - dev_keycloak: - container_name: "dev_keycloak" - image: 'quay.io/keycloak/keycloak:21.0' - hostname: keycloak - environment: - - KEYCLOAK_ADMIN=kcadmin - - KEYCLOAK_ADMIN_PASSWORD=kcpassword - - KEYCLOAK_LOGLEVEL=DEBUG - - KC_HOSTNAME_STRICT=false - networks: - dataverse: - aliases: - - keycloak.mydomain.com #create a DNS alias within the network (add the same alias to your /etc/hosts to get a working OIDC flow) - command: start-dev --import-realm --http-port=8090 # change port to 8090, so within the network and external the same port is used - ports: - - "8090:8090" - volumes: - - './conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json' - dev_nginx: container_name: dev_nginx image: gdcc/dev_nginx:unstable From bb4d78649338ced4f66ec4ba4167c6a94efcd23f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 26 Jan 2024 16:29:33 -0500 Subject: [PATCH 0594/1112] remove various dev stuff not needed for a demo #10238 --- docker/compose/demo/compose.yml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index e0839eb1023..b72d06951e8 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -12,9 +12,7 @@ services: DATAVERSE_DB_HOST: postgres DATAVERSE_DB_PASSWORD: secret DATAVERSE_DB_USER: dataverse - ENABLE_JDWP: "1" DATAVERSE_FEATURE_API_BEARER_AUTH: "1" - DATAVERSE_JSF_REFRESH_PERIOD: "1" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Ddataverse.files.file1.type=file 
-Ddataverse.files.file1.label=Filesystem @@ -33,11 +31,6 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets - # Uncomment to map the glassfish applications folder so that we can update webapp resources using scripts/intellij/cpwebapp.sh - # - ./docker-dev-volumes/glassfish/applications:/opt/payara/appserver/glassfish/domains/domain1/applications - # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). - # Replace 6.0 with the current version. - # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 @@ -130,15 +123,6 @@ services: tmpfs: - /mail:mode=770,size=128M,uid=1000,gid=1000 - dev_nginx: - container_name: dev_nginx - image: gdcc/dev_nginx:unstable - ports: - - "4849:4849" - restart: always - networks: - - dataverse - networks: dataverse: driver: bridge From a5b07964dbaca8dc4f436d4ae9405548d4a01374 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 28 Jan 2024 10:53:46 -0500 Subject: [PATCH 0595/1112] Straightforward fixes for the broken redirects for harvested records from "generic OAI" archives. #10254 --- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 9 +++++++++ src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index a2f560bc959..f75c3d92881 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -852,6 +852,15 @@ public String getRemoteArchiveURL() { if (StringUtil.nonEmpty(this.getProtocol()) && StringUtil.nonEmpty(this.getAuthority()) && StringUtil.nonEmpty(this.getIdentifier())) { + + // If there is a custom archival url for this Harvesting + // Source, we'll use that + String harvestingUrl = this.getHarvestedFrom().getHarvestingUrl(); + String archivalUrl = this.getHarvestedFrom().getArchiveUrl(); + if (!harvestingUrl.contains(archivalUrl)) { + return archivalUrl + this.getAuthority() + "/" + this.getIdentifier(); + } + // ... if not, we'll redirect to the resolver for the global id: return this.getPersistentURL(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b79f387f20b..1f2e603d984 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2026,7 +2026,7 @@ private String init(boolean initFull) { // to the local 404 page, below. 
logger.warning("failed to issue a redirect to "+originalSourceURL); } - return originalSourceURL; + return null; } return permissionsWrapper.notFound(); From c5f4ca46b6d384965c80926bce199f64f80d1af3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jan 2024 10:33:34 -0500 Subject: [PATCH 0596/1112] remove "dev_" from container names #10238 --- docker/compose/demo/compose.yml | 36 ++++++++++++++++----------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index b72d06951e8..09dde63d5f4 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -2,8 +2,8 @@ version: "2.4" services: - dev_dataverse: - container_name: "dev_dataverse" + dataverse: + container_name: "dataverse" hostname: dataverse image: gdcc/dataverse:alpha restart: on-failure @@ -25,9 +25,9 @@ services: networks: - dataverse depends_on: - - dev_postgres - - dev_solr - - dev_dv_initializer + - postgres + - solr + - dv_initializer volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets @@ -38,8 +38,8 @@ services: mem_reservation: 1024m privileged: false - dev_bootstrap: - container_name: "dev_bootstrap" + bootstrap: + container_name: "bootstrap" image: gdcc/configbaker:alpha restart: "no" command: @@ -48,8 +48,8 @@ services: networks: - dataverse - dev_dv_initializer: - container_name: "dev_dv_initializer" + dv_initializer: + container_name: "dv_initializer" image: gdcc/configbaker:alpha restart: "no" command: @@ -59,8 +59,8 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - dev_postgres: - container_name: "dev_postgres" + postgres: + container_name: "postgres" hostname: postgres image: postgres:13 restart: on-failure @@ -74,8 +74,8 @@ services: volumes: - ./docker-dev-volumes/postgresql/data:/var/lib/postgresql/data - dev_solr_initializer: - container_name: "dev_solr_initializer" + solr_initializer: + container_name: "solr_initializer" image: gdcc/configbaker:alpha restart: "no" command: @@ -86,12 +86,12 @@ services: - ./docker-dev-volumes/solr/data:/var/solr - ./docker-dev-volumes/solr/conf:/solr-template - dev_solr: - container_name: "dev_solr" + solr: + container_name: "solr" hostname: "solr" image: solr:9.3.0 depends_on: - - dev_solr_initializer + - solr_initializer restart: on-failure ports: - "8983:8983" @@ -105,8 +105,8 @@ services: - ./docker-dev-volumes/solr/data:/var/solr - ./docker-dev-volumes/solr/conf:/template - dev_smtp: - container_name: "dev_smtp" + smtp: + container_name: "smtp" hostname: "smtp" image: maildev/maildev:2.0.5 restart: on-failure From c0cda028c3ce0922f51c670917d94ef22cab61c5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 29 Jan 2024 10:39:14 -0500 Subject: [PATCH 0597/1112] rename docker-dev-volumes to data #10238 --- .../source/container/running/demo.rst | 2 +- docker/compose/demo/.gitignore | 1 + docker/compose/demo/compose.yml | 18 +++++++++--------- 3 files changed, 11 insertions(+), 10 deletions(-) create mode 100644 docker/compose/demo/.gitignore diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 0ad1e50442f..5eda108c842 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -84,7 +84,7 @@ If you no longer need the containers because your demo or evaluation is finished Deleting the Data Directory +++++++++++++++++++++++++++ -Data related to the Dataverse containers is placed in 
a directory called ``docker-dev-volumes`` next to the ``compose.yml`` file. If you are finished with your demo or evaluation or you want to start fresh, simply delete this directory. +Data related to the Dataverse containers is placed in a directory called ``data`` next to the ``compose.yml`` file. If you are finished with your demo or evaluation or you want to start fresh, simply delete this directory. Configuration ------------- diff --git a/docker/compose/demo/.gitignore b/docker/compose/demo/.gitignore new file mode 100644 index 00000000000..1269488f7fb --- /dev/null +++ b/docker/compose/demo/.gitignore @@ -0,0 +1 @@ +data diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 09dde63d5f4..3817921f10a 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -29,8 +29,8 @@ services: - solr - dv_initializer volumes: - - ./docker-dev-volumes/app/data:/dv - - ./docker-dev-volumes/app/secrets:/secrets + - ./data/app/data:/dv + - ./data/app/secrets:/secrets tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 @@ -57,7 +57,7 @@ services: - -c - "fix-fs-perms.sh dv" volumes: - - ./docker-dev-volumes/app/data:/dv + - ./data/app/data:/dv postgres: container_name: "postgres" @@ -72,7 +72,7 @@ services: networks: - dataverse volumes: - - ./docker-dev-volumes/postgresql/data:/var/lib/postgresql/data + - ./data/postgresql/data:/var/lib/postgresql/data solr_initializer: container_name: "solr_initializer" @@ -83,8 +83,8 @@ services: - -c - "fix-fs-perms.sh solr && cp -a /template/* /solr-template" volumes: - - ./docker-dev-volumes/solr/data:/var/solr - - ./docker-dev-volumes/solr/conf:/solr-template + - ./data/solr/data:/var/solr + - ./data/solr/conf:/solr-template solr: container_name: "solr" @@ -102,8 +102,8 @@ services: - "collection1" - "/template" volumes: - - ./docker-dev-volumes/solr/data:/var/solr - - ./docker-dev-volumes/solr/conf:/template + - ./data/solr/data:/var/solr + - ./data/solr/conf:/template smtp: container_name: "smtp" @@ -119,7 +119,7 @@ services: networks: - dataverse #volumes: - # - ./docker-dev-volumes/smtp/data:/mail + # - ./data/smtp/data:/mail tmpfs: - /mail:mode=770,size=128M,uid=1000,gid=1000 From d275a6343c0b7d0b296e8dc2d3c158afdd980058 Mon Sep 17 00:00:00 2001 From: raravumich <48064835+raravumich@users.noreply.github.com> Date: Mon, 29 Jan 2024 10:42:23 -0500 Subject: [PATCH 0598/1112] Add TurboCurator to External Tools list --- .../source/_static/admin/dataverse-external-tools.tsv | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 4f4c29d0670..a20ab864d2a 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -5,3 +5,4 @@ Binder explore dataset Binder allows you to spin up custom computing environment File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. 
Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. +TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." From 3b2a9eb367c8af675a5ee67b83bebd43ff4fffec Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 30 Jan 2024 10:15:26 -0500 Subject: [PATCH 0599/1112] bump solrj per qqmyers to address zookeeper dependency --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index f45e8fd9033..e8e1af188ff 100644 --- a/pom.xml +++ b/pom.xml @@ -283,7 +283,7 @@ org.apache.solr solr-solrj - 9.3.0 + 9.4.1 colt From 1ea4db3f3c011dc8ea28d9eb656e423fdccfccd9 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 30 Jan 2024 13:02:34 -0500 Subject: [PATCH 0600/1112] a checklist for making a core field allowMultiples for the dev. guide #9634 --- doc/sphinx-guides/source/developers/index.rst | 1 + .../source/developers/metadatablocksdev.rst | 26 +++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 doc/sphinx-guides/source/developers/metadatablocksdev.rst diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 25fea138736..25007baf589 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -31,6 +31,7 @@ Developer Guide making-releases making-library-releases metadataexport + metadatablocksdev tools unf/index make-data-count diff --git a/doc/sphinx-guides/source/developers/metadatablocksdev.rst b/doc/sphinx-guides/source/developers/metadatablocksdev.rst new file mode 100644 index 00000000000..17093471467 --- /dev/null +++ b/doc/sphinx-guides/source/developers/metadatablocksdev.rst @@ -0,0 +1,26 @@ +=========================== +Metadata Blocks Development +=========================== + +.. contents:: |toctitle| + :local: + +Introduction +------------ + +The idea behind Metadata Blocks in Dataverse is to have everything about the supported metadata fields configurable and customizable. Ideally, this should be accomplished by simply re-importing the updated tsv for the block via the API. In practice, when it comes to the core blocks that are distributed with Dataverse - such as the Citation and Social Science blocks - unfortunately, many dependencies exist in various parts of Dataverse, primarily import and export subsystems, on many specific fields being configured a certain way. 
This means that code changes may be required whenever a field from one of these core blocks is modified. + +Making a Field Multiple +----------------------- + +Back in 2023, in order to accommodate specific needs of some community member institutions a few fields from Citation and Social Science were changed to support multiple values. (For example, the ``alternativeTitle`` field from the Citation block.) A number of code changes had to be made to accommodate this, plus a number of changes in the sample metadata files that are maintained in the Dataverse code tree. The checklist below is to help another developer should a similar change become necessary in the future. Note that some of the steps below may not apply 1:1 to a different metadata field, depending on how it is exported and imported in various formats by Dataverse. It may help to consult the PR `#9440 `_ as a specific example of the changes that had to be made for the ``alternativeTitle`` field. + +- Change the value from ``FALSE`` to ``TRUE`` in the ``alowmultiples`` column of the .tsv file for the block (obviously). +- Change the value of the ``multiValued`` attribute for the search field in the Solr schema (``conf/solr/9.3.0/schema.xml`` as of writing this). +- Modify the DDI import code (``ImportDDIServiceBean.java``) to support multiple values. (you may be able to use the change in the PR above as a model.) +- Modify the DDI export utility (``DdiExportUtil.java``). +- Modify the OpenAire export utility (``OpenAireExportUtil.java``). +- Modify the following JSON source files in the Dataverse code tree to actually include multiple values for the field (two should be quite enough!): ``scripts/api/data/dataset-create-new-all-default-fields.json``, ``src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt``, ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json``. (These are used as examples for populating datasets via the import API and by the automated import and export code tests). +- Similarly modify the following XML files that are used by the DDI export code tests: ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml``. +- Make sure all the automated Unit and Integration tests are passing. +- Write a short release note to announce the change in the upcoming release. 
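Once the tsv and Solr schema edits from this checklist are in place, they can be tried out against a running test instance. This is a sketch only, assuming Dataverse is available at localhost:8080 with the admin API open, and using the citation block as the example; substitute the tsv you actually changed:

    # re-load the updated block tsv
    curl http://localhost:8080/api/admin/datasetfield/load -X POST \
         --upload-file scripts/api/data/metadatablocks/citation.tsv \
         -H "Content-type: text/tab-separated-values"
    # after setting multiValued for the field in schema.xml and restarting Solr,
    # reindex so existing datasets pick up the change
    curl http://localhost:8080/api/admin/index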
From 2eeda3d910ed128176c75b290c651252722dd919 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 30 Jan 2024 13:08:58 -0500 Subject: [PATCH 0601/1112] add sleep to SwordIT per qqmyers --- src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 39156f1c59b..4df6c89411d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -855,7 +855,7 @@ public void testDeleteFiles() { List oneFileLeftInV2Draft = statement3.getBody().xmlPath().getList("feed.entry.id"); logger.info("Number of files remaining in this post version 1 draft:" + oneFileLeftInV2Draft.size()); assertEquals(1, oneFileLeftInV2Draft.size()); - + UtilIT.sleepForLock(datasetPersistentId, "EditInProgress", apiToken, UtilIT.MAXIMUM_PUBLISH_LOCK_DURATION); Response deleteIndex1b = UtilIT.deleteFile(Integer.parseInt(index1b), apiToken); deleteIndex1b.then().assertThat() .statusCode(NO_CONTENT.getStatusCode()); From e4776101e8507a4b470b58ec70e90046516e4fa4 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 30 Jan 2024 13:16:11 -0500 Subject: [PATCH 0602/1112] linked the dev. checklist in the metadata customization section of the admin guide. #9634 --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 4f737bd730b..36956567a7d 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -648,6 +648,11 @@ Alternatively, you are welcome to request "edit" access to this "Tips for Datave The thinking is that the tips can become issues and the issues can eventually be worked on as features to improve the Dataverse Software metadata system. +Development Tasks Specific to Changing Fields in Core Metadata Blocks +--------------------------------------------------------------------- + +When it comes to the fields from the core blocks that are distributed with Dataverse (such as Citation and Social Science blocks), code dependencies may exist in Dataverse, primarily in the Import and Export subsystems, on these fields being configured a certain way. So, if it becomes necessary to modify one of such core fields (a real life example is making a single value-only field support multiple values), code changes may be necessary to accompany the change in the block tsv, plus some sample and test files maintained in the Dataverse source tree will need to be adjusted accordingly. An example of a checklist of such tasks is provided in the Development Guide, please see the :doc:`/developers/metadatablocksdev` section. 
+ Footnotes --------- From d960b980f926ba3e1d8ed0336ef3d541ddc6fb50 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 30 Jan 2024 16:01:55 -0500 Subject: [PATCH 0603/1112] #9748 comment out disabled test --- src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 6f0aa499dd1..2c96ce96dea 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -432,6 +432,7 @@ public void createToolShellScript() { @Disabled @Test public void createToolDataExplorer() { + /* JsonObjectBuilder job = Json.createObjectBuilder(); job.add("displayName", "Data Explorer"); job.add("description", ""); @@ -466,6 +467,7 @@ public void createToolDataExplorer() { Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); deleteExternalTool.then().assertThat() .statusCode(OK.getStatusCode()); + */ } // both preview and explore From 9b0a3cf2f0c5a6337aaed925ff640651fecf6116 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 30 Jan 2024 16:50:07 -0500 Subject: [PATCH 0604/1112] rewrite demo page as a tutorial #10238 Also, explain how to create a persona and some basic config. --- .../source/container/running/demo.rst | 169 +++++++++++------- docker/compose/demo/compose.yml | 4 + .../scripts/bootstrap/demo/init.sh | 13 ++ 3 files changed, 126 insertions(+), 60 deletions(-) create mode 100644 modules/container-configbaker/scripts/bootstrap/demo/init.sh diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 5eda108c842..4e2a9db3f48 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -1,7 +1,7 @@ Demo or Evaluation ================== -If you would like to demo or evaluate Dataverse running in containers, you're in the right place. Your feedback is extremely valuable to us! To let us know what you think, please see :ref:`helping-containers`. +In the following tutorial we'll walk through spinning up Dataverse in containers for demo or evaluation purposes. .. contents:: |toctitle| :local: @@ -9,6 +9,8 @@ If you would like to demo or evaluate Dataverse running in containers, you're in Quickstart ---------- +First, let's confirm that we can get Dataverse running on your system. + - Download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` - Run ``docker compose up`` in the directory where you put ``compose.yml`` - Visit http://localhost:8080 and try logging in: @@ -16,106 +18,138 @@ Quickstart - username: dataverseAdmin - password: admin1 -Hardware and Software Requirements ------------------------------------ +If you can log in, great! Please continue through the tutorial. If you have any trouble, please consult the sections below on troubleshooting and getting help. -- 8 GB RAM (if not much else is running) -- Mac, Linux, or Windows (experimental) -- Docker +Stopping and Starting the Containers +------------------------------------ -Windows support is experimental but we are very interested in supporting Windows better. Please report bugs (see :ref:`helping-containers`). +Let's practice stopping the containers and starting them up again. 
Your data, stored in a directory called ``data``, will remain intact -Tags and Versions ------------------ +To stop the containers hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit the ``c`` key). -The compose file references a tag called "alpha", which corresponds to the latest released version of Dataverse. This means that if a release of Dataverse comes out while you are demo'ing or evaluating, the version of Dataverse you are using could change. We are aware that there is a desire for tags that correspond to versions to ensure consistency. You are welcome to join `the discussion `_ and otherwise get in touch (see :ref:`helping-containers`). For more on tags, see :ref:`supported-image-tags-app`. +To start the containers, run ``docker compose up``. -Once Dataverse is running, you can check which version you have through the normal methods: +Deleting Data and Starting Over +------------------------------- -- Check the bottom right in a web browser. -- Check http://localhost:8080/api/info/version via API. +Again, data related to your Dataverse installation such as the database is stored in a directory called ``data`` that gets created in the directory where you ran ``docker compose`` commands. -About the Containers --------------------- +You may reach a point during your demo or evaluation that you'd like to start over with a fresh database. Simply make sure the containers are not running and then remove the ``data`` directory. Now, as before, you can run ``docker compose up`` to spin up the containers. -If you run ``docker ps``, you'll see that multiple containers are spun up in a demo or evaluation. Here are the most important ones: +Configuring Dataverse +--------------------- -- dataverse -- postgres -- solr -- smtp -- bootstrap +Now that you are familiar with the basics of running Dataverse in containers, let's move on to configuration. -Most are self-explanatory, and correspond to components listed under :doc:`/installation/prerequisites` in the (traditional) Installation Guide, but "bootstrap" refers to :doc:`../configbaker-image`. +Start Fresh ++++++++++++ -Additional containers are used in development (see :doc:`../dev-usage`), but for the purposes of a demo or evaluation, fewer moving (sometimes pointy) parts are included. +For this configuration exercise, please start fresh by stopping all containers and removing the ``data`` directory. -Security --------- +Change the Site URL ++++++++++++++++++++ -Please be aware that for now, the "dev" persona is used to bootstrap Dataverse, which means that admin APIs are wide open (to allow developers to test them; see :ref:`securing-your-installation` for more on API blocking), the "create user" key is set to a default value, etc. You can inspect the dev person `on GitHub `_ (look for ``--insecure``). +Edit ``compose.yml`` and change ``_CT_DATAVERSE_SITEURL`` to the URL you plan to use for your installation. -We plan to ship a "demo" persona but it is not ready yet. See also :ref:`configbaker-personas`. +(You can read more about this setting at :ref:`dataverse.siteUrl`.) -Common Operations ------------------ +This is an example of setting an environment variable to configure Dataverse. -Starting the Containers -+++++++++++++++++++++++ +Create and Run a Demo Persona ++++++++++++++++++++++++++++++ -First, download :download:`compose.yml <../../../../../docker/compose/demo/compose.yml>` and place it somewhere you'll remember. 
+Previously we used the "dev" persona to bootstrap Dataverse, but for security reasons, we should create a persona more suited to demos and evaluations. -Then, run ``docker compose up`` in the directory where you put ``compose.yml`` +Edit the ``compose.yml`` file and look for the following section. -Starting the containers for the first time involves a bootstrap process. You should see "have a nice day" output at the end. +.. code-block:: bash -Stopping the Containers -+++++++++++++++++++++++ + bootstrap: + container_name: "bootstrap" + image: gdcc/configbaker:alpha + restart: "no" + command: + - bootstrap.sh + - dev + #- demo + #volumes: + # - ./demo:/scripts/bootstrap/demo + networks: + - dataverse -You might want to stop the containers if you aren't using them. Hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit the ``c`` key). +Comment out "dev" and uncomment "demo". -You data is still intact and you can start the containers again with ``docker compose up``. +Uncomment the "volumes" section. -Deleting the Containers -+++++++++++++++++++++++ +Create a directory called "demo" and copy :download:`init.sh <../../../../../modules/container-configbaker/scripts/bootstrap/demo/init.sh>` into it. You are welcome to edit this demo init script, customizing the final message, for example. -If you no longer need the containers because your demo or evaluation is finished and you want to reclaim disk space, run ``docker compose down`` in the directory where you put ``compose.yml``. +Now run ``docker compose up``. The "bootstrap" container should exit with the message from the init script and Dataverse should be running on http://localhost:8080 as before during the quickstart exercise. -Deleting the Data Directory -+++++++++++++++++++++++++++ +One of the main differences between the "dev" persona and our new "demo" persona is that we are now running the setup-all script without the ``--insecure`` flag. This makes our installation more secure, though it does block "admin" APIs that are useful for configuration. -Data related to the Dataverse containers is placed in a directory called ``data`` next to the ``compose.yml`` file. If you are finished with your demo or evaluation or you want to start fresh, simply delete this directory. +Set DOI Provider to FAKE +++++++++++++++++++++++++ -Configuration -------------- +For the purposes of a demo, we'll use the "FAKE" DOI provider. (For more on this and related settings, see :ref:`pids-configuration` in the Installation Guide.) Without this step, you won't be able to create or publish datasets. -Configuration is described in greater detail under :doc:`/installation/config` in the Installation Guide, but there are some specifics to running in containers you should know about. +Run the following command. (In this context, "dataverse" is the name of the running container.) -.. _configbaker-personas: +``docker exec -it dataverse curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE`` -Personas -++++++++ +This is an example of configuring a database setting, which you can read more about at :ref:`database-settings` in the Installation Guide. -When the containers are bootstrapped, the "dev" persona is used. In the future we plan to add a "demo" persona that is more suited to demo and evaluation use cases. 
+Smoke Test +---------- -Database Settings -+++++++++++++++++ +At this point, please try some basic operations within your installation, such as: -Updating database settings is the same as described under :ref:`database-settings` in the Installation Guide. +- logging in as dataverseAdmin +- publishing the "root" collection (dataverse) +- creating a collection +- creating a dataset +- uploading a data file +- publishing the dataset -MPCONFIG Options -++++++++++++++++ +About the Containers +-------------------- -The compose file contains an ``environment`` section with various MicroProfile Config (MPCONFIG) options. You can experiment with this by adding ``DATAVERSE_VERSION: foobar`` to change the (displayed) version of Dataverse to "foobar". +Container List +++++++++++++++ -JVM Options -+++++++++++ +If you run ``docker ps``, you'll see that multiple containers are spun up in a demo or evaluation. Here are the most important ones: -JVM options are not especially easy to change in the container. The general process is to get a shell on the "dataverse" container, change the settings, and then stop and start the containers. See :ref:`jvm-options` for more. +- dataverse +- postgres +- solr +- smtp +- bootstrap + +Most are self-explanatory, and correspond to components listed under :doc:`/installation/prerequisites` in the (traditional) Installation Guide, but "bootstrap" refers to :doc:`../configbaker-image`. + +Additional containers are used in development (see :doc:`../dev-usage`), but for the purposes of a demo or evaluation, fewer moving (sometimes pointy) parts are included. + +Tags and Versions ++++++++++++++++++ + +The compose file references a tag called "alpha", which corresponds to the latest released version of Dataverse. This means that if a release of Dataverse comes out while you are demo'ing or evaluating, the version of Dataverse you are using could change if you do a ``docker pull``. We are aware that there is a desire for tags that correspond to versions to ensure consistency. You are welcome to join `the discussion `_ and otherwise get in touch (see :ref:`helping-containers`). For more on tags, see :ref:`supported-image-tags-app`. + +Once Dataverse is running, you can check which version you have through the normal methods: + +- Check the bottom right in a web browser. +- Check http://localhost:8080/api/info/version via API. Troubleshooting --------------- +Hardware and Software Requirements +++++++++++++++++++++++++++++++++++ + +- 8 GB RAM (if not much else is running) +- Mac, Linux, or Windows (experimental) +- Docker + +Windows support is experimental but we are very interested in supporting Windows better. Please report bugs (see :ref:`helping-containers`). + Bootstrapping Did Not Complete ++++++++++++++++++++++++++++++ @@ -126,6 +160,21 @@ In the compose file, try increasing the timeout in the bootstrap container by ad environment: - TIMEOUT=10m +Wrapping Up +----------- + +Deleting the Containers and Data +++++++++++++++++++++++++++++++++ + +If you no longer need the containers because your demo or evaluation is finished and you want to reclaim disk space, run ``docker compose down`` in the directory where you put ``compose.yml``. + +You might also want to delete the ``data`` directory, as described above. + +Giving Feedback +--------------- + +Your feedback is extremely valuable to us! To let us know what you think, please see :ref:`helping-containers`. 
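The wrap-up steps described above amount to two commands, shown here as a sketch to be run from the directory containing ``compose.yml``; note that removing ``data`` is irreversible:

    # stop and remove the demo containers
    docker compose down
    # optionally reclaim the demo data as well
    rm -rf data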
+ Getting Help ------------ diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 3817921f10a..a262f43006a 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -9,6 +9,7 @@ services: restart: on-failure user: payara environment: + _CT_DATAVERSE_SITEURL: "https://demo.example.org" DATAVERSE_DB_HOST: postgres DATAVERSE_DB_PASSWORD: secret DATAVERSE_DB_USER: dataverse @@ -45,6 +46,9 @@ services: command: - bootstrap.sh - dev + #- demo + #volumes: + # - ./demo:/scripts/bootstrap/demo networks: - dataverse diff --git a/modules/container-configbaker/scripts/bootstrap/demo/init.sh b/modules/container-configbaker/scripts/bootstrap/demo/init.sh new file mode 100644 index 00000000000..0e9be7ffef5 --- /dev/null +++ b/modules/container-configbaker/scripts/bootstrap/demo/init.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -euo pipefail + +# Set some defaults as documented +DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"} +export DATAVERSE_URL + +echo "Running base setup-all.sh..." +"${BOOTSTRAP_DIR}"/base/setup-all.sh -p=admin1 | tee /tmp/setup-all.sh.out + +echo "" +echo "Done, your instance has been configured for demo or eval. Have a nice day!" From bdc2c8e980ac9878ef472f874098e4f25431592b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 31 Jan 2024 10:05:04 -0500 Subject: [PATCH 0605/1112] #9748 avoid issue with existing tools --- .../edu/harvard/iq/dataverse/api/TestApi.java | 26 +++++++++++++++++++ .../iq/dataverse/api/ExternalToolsIT.java | 15 ++++++----- .../edu/harvard/iq/dataverse/api/UtilIT.java | 15 +++++++++++ 3 files changed, 50 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java index 87be1f14e05..10510013495 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java @@ -71,5 +71,31 @@ public Response getExternalToolsForFile(@PathParam("id") String idSupplied, @Que return wr.getResponse(); } } + + @Path("files/{id}/externalTool/{toolId}") + @GET + public Response getExternalToolForFileById(@PathParam("id") String idSupplied, @QueryParam("type") String typeSupplied, @PathParam("toolId") String toolId) { + ExternalTool.Type type; + try { + type = ExternalTool.Type.fromString(typeSupplied); + } catch (IllegalArgumentException ex) { + return error(BAD_REQUEST, ex.getLocalizedMessage()); + } + try { + DataFile dataFile = findDataFileOrDie(idSupplied); + List datasetTools = externalToolService.findFileToolsByTypeAndContentType(type, dataFile.getContentType()); + for (ExternalTool tool : datasetTools) { + ApiToken apiToken = externalToolService.getApiToken(getRequestApiKey()); + ExternalToolHandler externalToolHandler = new ExternalToolHandler(tool, dataFile, apiToken, dataFile.getFileMetadata(), null); + JsonObjectBuilder toolToJson = externalToolService.getToolAsJsonWithQueryParameters(externalToolHandler); + if (externalToolService.meetsRequirements(tool, dataFile) && tool.getId().toString().equals(toolId)) { + return ok(toolToJson); + } + } + return error(BAD_REQUEST, "Could not find external tool with id of " + toolId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 2c96ce96dea..9a280f475a1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -101,7 +101,7 @@ public void testFileLevelTool1() { .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("AwesomeTool")); - long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + Long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); Response getTool = UtilIT.getExternalTool(toolId); getTool.prettyPrint(); @@ -115,14 +115,17 @@ public void testFileLevelTool1() { .statusCode(BAD_REQUEST.getStatusCode()) .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview, query].")); - Response getExternalToolsForTabularFiles = UtilIT.getExternalToolsForFile(tabularFileId.toString(), "explore", apiToken); + // Getting tool by tool Id to avoid issue where there are existing tools + String toolIdString = toolId.toString(); + Response getExternalToolsForTabularFiles = UtilIT.getExternalToolForFileById(tabularFileId.toString(), "explore", apiToken, toolIdString); getExternalToolsForTabularFiles.prettyPrint(); + getExternalToolsForTabularFiles.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data[0].displayName", CoreMatchers.equalTo("AwesomeTool")) - .body("data[0].scope", CoreMatchers.equalTo("file")) - .body("data[0].contentType", CoreMatchers.equalTo("text/tab-separated-values")) - .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("http://awesometool.com?fileid=" + tabularFileId + "&key=" + apiToken)); + .body("data.displayName", CoreMatchers.equalTo("AwesomeTool")) + .body("data.scope", CoreMatchers.equalTo("file")) + .body("data.contentType", CoreMatchers.equalTo("text/tab-separated-values")) + .body("data.toolUrlWithQueryParams", CoreMatchers.equalTo("http://awesometool.com?fileid=" + tabularFileId + "&key=" + apiToken)); Response getExternalToolsForJuptyerNotebooks = UtilIT.getExternalToolsForFile(jupyterNotebookFileId.toString(), "explore", apiToken); getExternalToolsForJuptyerNotebooks.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 6af3f8a0a09..ec41248a65f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2354,6 +2354,21 @@ static Response getExternalToolsForFile(String idOrPersistentIdOfFile, String ty } return requestSpecification.get("/api/admin/test/files/" + idInPath + "/externalTools?type=" + type + optionalQueryParam); } + + static Response getExternalToolForFileById(String idOrPersistentIdOfFile, String type, String apiToken, String toolId) { + String idInPath = idOrPersistentIdOfFile; // Assume it's a number. + String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. 
+ if (!NumberUtils.isCreatable(idOrPersistentIdOfFile)) { + idInPath = ":persistentId"; + optionalQueryParam = "&persistentId=" + idOrPersistentIdOfFile; + } + RequestSpecification requestSpecification = given(); + if (apiToken != null) { + requestSpecification = given() + .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/admin/test/files/" + idInPath + "/externalTool/" + toolId + "?type=" + type + optionalQueryParam); + } static Response submitFeedback(JsonObjectBuilder job) { return given() From 7d537aa394c447562820cf0343fd6ec2d8a760ca Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 31 Jan 2024 17:45:01 -0500 Subject: [PATCH 0606/1112] simplified/reorganized the new dev. checklist for making a core field multiple #9634 --- .../source/admin/metadatacustomization.rst | 19 +++++++++++++- doc/sphinx-guides/source/developers/index.rst | 1 - .../source/developers/metadatablocksdev.rst | 26 ------------------- 3 files changed, 18 insertions(+), 28 deletions(-) delete mode 100644 doc/sphinx-guides/source/developers/metadatablocksdev.rst diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 36956567a7d..f97b222b51f 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -651,7 +651,24 @@ The thinking is that the tips can become issues and the issues can eventually be Development Tasks Specific to Changing Fields in Core Metadata Blocks --------------------------------------------------------------------- -When it comes to the fields from the core blocks that are distributed with Dataverse (such as Citation and Social Science blocks), code dependencies may exist in Dataverse, primarily in the Import and Export subsystems, on these fields being configured a certain way. So, if it becomes necessary to modify one of such core fields (a real life example is making a single value-only field support multiple values), code changes may be necessary to accompany the change in the block tsv, plus some sample and test files maintained in the Dataverse source tree will need to be adjusted accordingly. An example of a checklist of such tasks is provided in the Development Guide, please see the :doc:`/developers/metadatablocksdev` section. +When it comes to the fields from the core blocks that are distributed with Dataverse (such as Citation, Social Science and Geospatial blocks), code dependencies may exist in Dataverse, primarily in the Import and Export subsystems, on these fields being configured a certain way. So, if it becomes necessary to modify one of such core fields, code changes may be necessary to accompany the change in the block tsv, plus some sample and test files maintained in the Dataverse source tree will need to be adjusted accordingly. + +Making a Field Multi-Valued +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As a recent real life example, a few fields from the Citation and Social Science block were changed to support multiple values, in order to accommodate specific needs of some community member institutions. A PR for one of these fields, ``alternativeTitle`` from the Citation block is linked below. Each time a number of code changes, plus some changes in the sample metadata files in the Dataverse code tree had to be made. The checklist below is to help another developer in the event that a similar change becomes necessary in the future. 
Note that some of the steps below may not apply 1:1 to a different metadata field, depending on how it is exported and imported in various formats by Dataverse. It may help to consult the PR `#9440 `_ as a specific example of the changes that had to be made for the ``alternativeTitle`` field. + +- Change the value from ``FALSE`` to ``TRUE`` in the ``alowmultiples`` column of the .tsv file for the block. +- Change the value of the ``multiValued`` attribute for the search field in the Solr schema (``conf/solr/9.3.0/schema.xml`` as of writing this). +- Modify the DDI import code (``ImportDDIServiceBean.java``) to support multiple values. (you may be able to use the change in the PR above as a model.) +- Modify the DDI export utility (``DdiExportUtil.java``). +- Modify the OpenAire export utility (``OpenAireExportUtil.java``). +- Modify the following JSON source files in the Dataverse code tree to actually include multiple values for the field (two should be quite enough!): ``scripts/api/data/dataset-create-new-all-default-fields.json``, ``src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt``, ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json``. (These are used as examples for populating datasets via the import API and by the automated import and export code tests). +- Similarly modify the following XML files that are used by the DDI export code tests: ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml``. +- Make sure all the automated Unit and Integration tests are passing. +- Write a short release note to announce the change in the upcoming release. +- Make a Pull Request. + Footnotes --------- diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 25007baf589..25fea138736 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -31,7 +31,6 @@ Developer Guide making-releases making-library-releases metadataexport - metadatablocksdev tools unf/index make-data-count diff --git a/doc/sphinx-guides/source/developers/metadatablocksdev.rst b/doc/sphinx-guides/source/developers/metadatablocksdev.rst deleted file mode 100644 index 17093471467..00000000000 --- a/doc/sphinx-guides/source/developers/metadatablocksdev.rst +++ /dev/null @@ -1,26 +0,0 @@ -=========================== -Metadata Blocks Development -=========================== - -.. contents:: |toctitle| - :local: - -Introduction ------------- - -The idea behind Metadata Blocks in Dataverse is to have everything about the supported metadata fields configurable and customizable. Ideally, this should be accomplished by simply re-importing the updated tsv for the block via the API. In practice, when it comes to the core blocks that are distributed with Dataverse - such as the Citation and Social Science blocks - unfortunately, many dependencies exist in various parts of Dataverse, primarily import and export subsystems, on many specific fields being configured a certain way. This means that code changes may be required whenever a field from one of these core blocks is modified. - -Making a Field Multiple ------------------------ - -Back in 2023, in order to accommodate specific needs of some community member institutions a few fields from Citation and Social Science were changed to support multiple values. 
(For example, the ``alternativeTitle`` field from the Citation block.) A number of code changes had to be made to accommodate this, plus a number of changes in the sample metadata files that are maintained in the Dataverse code tree. The checklist below is to help another developer should a similar change become necessary in the future. Note that some of the steps below may not apply 1:1 to a different metadata field, depending on how it is exported and imported in various formats by Dataverse. It may help to consult the PR `#9440 `_ as a specific example of the changes that had to be made for the ``alternativeTitle`` field. - -- Change the value from ``FALSE`` to ``TRUE`` in the ``alowmultiples`` column of the .tsv file for the block (obviously). -- Change the value of the ``multiValued`` attribute for the search field in the Solr schema (``conf/solr/9.3.0/schema.xml`` as of writing this). -- Modify the DDI import code (``ImportDDIServiceBean.java``) to support multiple values. (you may be able to use the change in the PR above as a model.) -- Modify the DDI export utility (``DdiExportUtil.java``). -- Modify the OpenAire export utility (``OpenAireExportUtil.java``). -- Modify the following JSON source files in the Dataverse code tree to actually include multiple values for the field (two should be quite enough!): ``scripts/api/data/dataset-create-new-all-default-fields.json``, ``src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt``, ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json``. (These are used as examples for populating datasets via the import API and by the automated import and export code tests). -- Similarly modify the following XML files that are used by the DDI export code tests: ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml``. -- Make sure all the automated Unit and Integration tests are passing. -- Write a short release note to announce the change in the upcoming release. From ad12c7f2ddaf4f6fb1ec5023845d98092df0da47 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 1 Feb 2024 12:28:06 -0500 Subject: [PATCH 0607/1112] Apply suggestions from code review --- .../source/admin/metadatacustomization.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index f97b222b51f..841dfd8b3cd 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -658,16 +658,16 @@ Making a Field Multi-Valued As a recent real life example, a few fields from the Citation and Social Science block were changed to support multiple values, in order to accommodate specific needs of some community member institutions. A PR for one of these fields, ``alternativeTitle`` from the Citation block is linked below. Each time a number of code changes, plus some changes in the sample metadata files in the Dataverse code tree had to be made. The checklist below is to help another developer in the event that a similar change becomes necessary in the future. Note that some of the steps below may not apply 1:1 to a different metadata field, depending on how it is exported and imported in various formats by Dataverse. 
It may help to consult the PR `#9440 `_ as a specific example of the changes that had to be made for the ``alternativeTitle`` field. -- Change the value from ``FALSE`` to ``TRUE`` in the ``alowmultiples`` column of the .tsv file for the block. -- Change the value of the ``multiValued`` attribute for the search field in the Solr schema (``conf/solr/9.3.0/schema.xml`` as of writing this). -- Modify the DDI import code (``ImportDDIServiceBean.java``) to support multiple values. (you may be able to use the change in the PR above as a model.) +- Change the value from ``FALSE`` to ``TRUE`` in the ``allowmultiples`` column of the .tsv file for the block. +- Change the value of the ``multiValued`` attribute for the search field in the Solr schema (``conf/solr/x.x.x/schema.xml``). +- Modify the DDI import code (``ImportDDIServiceBean.java``) to support multiple values. (You may be able to use the change in the PR above as a model.) - Modify the DDI export utility (``DdiExportUtil.java``). - Modify the OpenAire export utility (``OpenAireExportUtil.java``). - Modify the following JSON source files in the Dataverse code tree to actually include multiple values for the field (two should be quite enough!): ``scripts/api/data/dataset-create-new-all-default-fields.json``, ``src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt``, ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json``. (These are used as examples for populating datasets via the import API and by the automated import and export code tests). - Similarly modify the following XML files that are used by the DDI export code tests: ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml``. -- Make sure all the automated Unit and Integration tests are passing. +- Make sure all the automated unit and integration tests are passing. - Write a short release note to announce the change in the upcoming release. -- Make a Pull Request. +- Make a pull request. Footnotes From e064313c4c11fbec2bf875d0f8dbe98b99013fca Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 1 Feb 2024 12:31:01 -0500 Subject: [PATCH 0608/1112] add refs to dev guide #9634 --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 841dfd8b3cd..5bd28bfa103 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -665,8 +665,8 @@ As a recent real life example, a few fields from the Citation and Social Science - Modify the OpenAire export utility (``OpenAireExportUtil.java``). - Modify the following JSON source files in the Dataverse code tree to actually include multiple values for the field (two should be quite enough!): ``scripts/api/data/dataset-create-new-all-default-fields.json``, ``src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt``, ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json``. (These are used as examples for populating datasets via the import API and by the automated import and export code tests). 
- Similarly modify the following XML files that are used by the DDI export code tests: ``src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml`` and ``src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml``. -- Make sure all the automated unit and integration tests are passing. -- Write a short release note to announce the change in the upcoming release. +- Make sure all the automated unit and integration tests are passing. See :doc:`/developers/testing` in the Developer Guide. +- Write a short release note to announce the change in the upcoming release. See :ref:`writing-release-note-snippets` in the Developer Guide. - Make a pull request. From 89739bc39542930546c807c2236033b7da790688 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 1 Feb 2024 16:37:58 -0500 Subject: [PATCH 0609/1112] use --insecure and secure later #10238 Using --insecure at first and then doing securing APIs, etc later (like non --insecure does) seems like the best option for now. It allows us to simplify the tutorial and set up an unblock key for later use. --- .../source/container/running/demo.rst | 96 +++++++++++++------ .../scripts/bootstrap/demo/init.sh | 30 +++++- 2 files changed, 94 insertions(+), 32 deletions(-) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 4e2a9db3f48..24027e677a1 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -36,27 +36,18 @@ Again, data related to your Dataverse installation such as the database is store You may reach a point during your demo or evaluation that you'd like to start over with a fresh database. Simply make sure the containers are not running and then remove the ``data`` directory. Now, as before, you can run ``docker compose up`` to spin up the containers. -Configuring Dataverse +Setting Up for a Demo --------------------- -Now that you are familiar with the basics of running Dataverse in containers, let's move on to configuration. +Now that you are familiar with the basics of running Dataverse in containers, let's move on to a better setup for a demo or evaluation. -Start Fresh -+++++++++++ - -For this configuration exercise, please start fresh by stopping all containers and removing the ``data`` directory. - -Change the Site URL -+++++++++++++++++++ - -Edit ``compose.yml`` and change ``_CT_DATAVERSE_SITEURL`` to the URL you plan to use for your installation. - -(You can read more about this setting at :ref:`dataverse.siteUrl`.) +Starting Fresh +++++++++++++++ -This is an example of setting an environment variable to configure Dataverse. +For this exercise, please start fresh by stopping all containers and removing the ``data`` directory. -Create and Run a Demo Persona -+++++++++++++++++++++++++++++ +Creating and Running a Demo Persona ++++++++++++++++++++++++++++++++++++ Previously we used the "dev" persona to bootstrap Dataverse, but for security reasons, we should create a persona more suited to demos and evaluations. @@ -83,36 +74,81 @@ Uncomment the "volumes" section. Create a directory called "demo" and copy :download:`init.sh <../../../../../modules/container-configbaker/scripts/bootstrap/demo/init.sh>` into it. You are welcome to edit this demo init script, customizing the final message, for example. +Note that the init script contains a key for using the admin API once it is blocked. You should change it in the script from "unblockme" to something only you know. 
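As a rough sketch, the steps above might look like this on the command line (the download location of ``init.sh`` is an assumption; adjust paths to match your setup):

.. code-block:: bash

   # run from the directory that contains compose.yml
   mkdir -p demo
   cp ~/Downloads/init.sh demo/init.sh   # the demo init script linked above
   # optionally edit demo/init.sh, e.g. to customize the final message
   docker compose up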
+ Now run ``docker compose up``. The "bootstrap" container should exit with the message from the init script and Dataverse should be running on http://localhost:8080 as before during the quickstart exercise. One of the main differences between the "dev" persona and our new "demo" persona is that we are now running the setup-all script without the ``--insecure`` flag. This makes our installation more secure, though it does block "admin" APIs that are useful for configuration. -Set DOI Provider to FAKE -++++++++++++++++++++++++ +Smoke Testing +------------- + +At this point, please try the following basic operations within your installation: + +- logging in as dataverseAdmin (password "admin1") +- publishing the "root" collection (dataverse) +- creating a collection +- creating a dataset +- uploading a data file +- publishing the dataset + +If anything isn't working, please see the sections below on troubleshooting, giving feedback, and getting help. + +Further Configuration +--------------------- + +Now that we've verified through a smoke test that basic operations are working, let's configure our installation of Dataverse. + +Please refer to the :doc:`/installation/config` section of the Installation Guide for various configuration options. -For the purposes of a demo, we'll use the "FAKE" DOI provider. (For more on this and related settings, see :ref:`pids-configuration` in the Installation Guide.) Without this step, you won't be able to create or publish datasets. +Below we'll explain some specifics for configuration in containers. -Run the following command. (In this context, "dataverse" is the name of the running container.) +JVM Options/MicroProfile Config ++++++++++++++++++++++++++++++++ -``docker exec -it dataverse curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE`` +:ref:`jvm-options` can be configured under ``JVM_ARGS`` in the ``compose.yml`` file. Here's an example: + +.. code-block:: bash -This is an example of configuring a database setting, which you can read more about at :ref:`database-settings` in the Installation Guide. + environment: + JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Smoke Test +Some JVM options can be configured as environment variables. For example, you can configure the database host like this: + +.. code-block:: bash + + environment: + DATAVERSE_DB_HOST: postgres + +We are in the process of making more JVM options configurable as environment variables. Look for the term "MicroProfile Config" in under :doc:`/installation/config` in the Installation Guide to know if you can use them this way. + +Please note that for a few environment variables (the ones that start with ``%ct`` in :download:`microprofile-config.properties <../../../../../src/main/resources/META-INF/microprofile-config.properties>`), you have to prepend ``_CT_`` to make, for example, ``_CT_DATAVERSE_SITEURL``. We are working on a fix for this in https://github.com/IQSS/dataverse/issues/10285. + +There is a final way to configure JVM options that we plan to deprecate once all JVM options have been converted to MicroProfile Config. Look for "magic trick" under "tunables" at :doc:`../app-image` for more information. + +Database Settings ++++++++++++++++++ + +Generally, you should be able to look at the list of :ref:`database-settings` and configure them but the "demo" persona above secured your installation to the point that you'll need an "unblock key" to access the "admin" API and change database settings. 
+ +In the example below of configuring :ref:`:FooterCopyright` we use the default unblock key of "unblockme" but you should use the key you set above. + +``curl -X PUT -d ", My Org" "http://localhost:8080/api/admin/settings/:FooterCopyright?unblock-key=unblockme"`` + +Once you make this change, it should be visible in the copyright in the bottom left of every page. + +Next Steps ---------- -At this point, please try some basic operations within your installation, such as: +From here, you are encouraged to continue poking around, configuring, and testing. You will probably spend a lot of time reading the :doc:`/installation/config` section of the Installation Guide. -- logging in as dataverseAdmin -- publishing the "root" collection (dataverse) -- creating a collection -- creating a dataset -- uploading a data file -- publishing the dataset +Please consider giving feedback using the methods described below. Good luck with your demo! About the Containers -------------------- +Now that you've gone through the tutorial, you might be interested in the various containers you've spun up and what they do. + Container List ++++++++++++++ diff --git a/modules/container-configbaker/scripts/bootstrap/demo/init.sh b/modules/container-configbaker/scripts/bootstrap/demo/init.sh index 0e9be7ffef5..e8d1d07dd2d 100644 --- a/modules/container-configbaker/scripts/bootstrap/demo/init.sh +++ b/modules/container-configbaker/scripts/bootstrap/demo/init.sh @@ -2,12 +2,38 @@ set -euo pipefail -# Set some defaults as documented +# Set some defaults DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"} export DATAVERSE_URL +BLOCKED_API_KEY=${BLOCKED_API_KEY:-"unblockme"} +export BLOCKED_API_KEY + +# --insecure is used so we can configure a few things but +# later in this script we'll apply the changes as if we had +# run the script without --insecure. echo "Running base setup-all.sh..." -"${BOOTSTRAP_DIR}"/base/setup-all.sh -p=admin1 | tee /tmp/setup-all.sh.out +"${BOOTSTRAP_DIR}"/base/setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out + +echo "" +echo "Setting DOI provider to \"FAKE\"..." +curl -sS -X PUT -d FAKE "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" + +echo "" +echo "Revoke the key that allows for creation of builtin users..." +curl -sS -X DELETE "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY" + +echo "" +echo "Set key for accessing blocked API endpoints..." +curl -sS -X PUT -d "$BLOCKED_API_KEY" "${DATAVERSE_URL}/api/admin/settings/:BlockedApiKey" + +echo "" +echo "Set policy to only allow access to admin APIs with a key..." +curl -sS -X PUT -d unblock-key "${DATAVERSE_URL}/api/admin/settings/:BlockedApiPolicy" + +echo "" +echo "Block admin and other sensitive API endpoints..." +curl -sS -X PUT -d 'admin,builtin-users' "${DATAVERSE_URL}/api/admin/settings/:BlockedApiEndpoints" echo "" echo "Done, your instance has been configured for demo or eval. Have a nice day!" From c8f71f16d41c83586bd4572fd2e4bcf9f8b3962b Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 2 Feb 2024 16:15:17 -0500 Subject: [PATCH 0610/1112] Update metadatacustomization.rst The /tree seems to be just a reference for the GitHub URL but the project doesn't have a "tree" directory so it would probably be better or less confusing to reference the root of the project. Also, the property files are in a different location than the one specified in the documentation.
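For orientation, the two locations this commit message refers to (as the patches below settle on them) are, relative to the repository root; this is just a sketch:

```bash
ls scripts/api/data/metadatablocks/   # shipped metadata block TSVs
ls src/main/java/propertyFiles/       # corresponding ResourceBundle .properties files
```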
--- doc/sphinx-guides/source/admin/metadatacustomization.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 5bd28bfa103..c9cb3c47f85 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -37,8 +37,8 @@ tab-separated value (TSV). [1]_\ :sup:`,`\ [2]_ While it is technically possible to define more than one metadata block in a TSV file, it is good organizational practice to define only one in each file. -The metadata block TSVs shipped with the Dataverse Software are in `/tree/develop/scripts/api/data/metadatablocks -`__ and the corresponding ResourceBundle property files `/tree/develop/src/main/java `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets +The metadata block TSVs shipped with the Dataverse Software are in `/src/scripts/api/data/metadatablocks +`__ and the corresponding ResourceBundle property files `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse Software installation process operates on the TSVs, not the Google spreadsheet. About the metadata block TSV From 2978080e5299d91d340ff926ec2a3a33a81b40df Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 2 Feb 2024 16:50:20 -0500 Subject: [PATCH 0611/1112] Update metadatacustomization.rst --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index c9cb3c47f85..78eadd9b2ce 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -37,8 +37,8 @@ tab-separated value (TSV). [1]_\ :sup:`,`\ [2]_ While it is technically possible to define more than one metadata block in a TSV file, it is good organizational practice to define only one in each file. -The metadata block TSVs shipped with the Dataverse Software are in `/src/scripts/api/data/metadatablocks -`__ and the corresponding ResourceBundle property files `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets +The metadata block TSVs shipped with the Dataverse Software are in `/scripts/api/data/metadatablocks +`__ with the corresponding ResourceBundle property files in `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse Software installation process operates on the TSVs, not the Google spreadsheet. 
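When one of these TSVs is edited, it is re-imported through the admin API; a minimal sketch (the filename is an example, and changed search fields still require a Solr schema update afterwards):

.. code-block:: bash

   curl http://localhost:8080/api/admin/datasetfield/load -X POST \
        -H "Content-type: text/tab-separated-values" \
        --upload-file scripts/api/data/metadatablocks/citation.tsv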
About the metadata block TSV From 24daf553ecdbc7811737da58d6a41b6294a98434 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 2 Feb 2024 16:53:24 -0500 Subject: [PATCH 0612/1112] Update metadatacustomization.rst As @qqmyers pointed these are not on /src --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index c9cb3c47f85..4920859d716 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -37,7 +37,7 @@ tab-separated value (TSV). [1]_\ :sup:`,`\ [2]_ While it is technically possible to define more than one metadata block in a TSV file, it is good organizational practice to define only one in each file. -The metadata block TSVs shipped with the Dataverse Software are in `/src/scripts/api/data/metadatablocks +The metadata block TSVs shipped with the Dataverse Software are in `/scripts/api/data/metadatablocks `__ and the corresponding ResourceBundle property files `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse Software installation process operates on the TSVs, not the Google spreadsheet. From 7c248239c260e56c2c7e162b0ddfafda1af7d9f6 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 2 Feb 2024 19:12:59 -0500 Subject: [PATCH 0613/1112] Fix line break --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 7d6e0c4c5c1..f518c7eb802 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -40,7 +40,6 @@ good organizational practice to define only one in each file. The metadata block TSVs shipped with the Dataverse Software are in `/scripts/api/data/metadatablocks `__ with the corresponding ResourceBundle property files in `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets - document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse Software installation process operates on the TSVs, not the Google spreadsheet. About the metadata block TSV From 59f1560daa77404c602029e2112546b00f9f19f2 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 2 Feb 2024 19:16:02 -0500 Subject: [PATCH 0614/1112] Fix incorrect line break that cause build fail --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index f518c7eb802..78eadd9b2ce 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -38,7 +38,6 @@ possible to define more than one metadata block in a TSV file, it is good organizational practice to define only one in each file. 
The metadata block TSVs shipped with the Dataverse Software are in `/scripts/api/data/metadatablocks - `__ with the corresponding ResourceBundle property files in `/src/main/java/propertyFiles `__ of the Dataverse Software GitHub repo. Human-readable copies are available in `this Google Sheets document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse Software installation process operates on the TSVs, not the Google spreadsheet. From 77951683a2f495e04098125a81945dc076d80b4b Mon Sep 17 00:00:00 2001 From: raravumich <48064835+raravumich@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:33:46 -0500 Subject: [PATCH 0615/1112] added tabs --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index a20ab864d2a..05263498977 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -5,4 +5,4 @@ Binder explore dataset Binder allows you to spin up custom computing environment File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. -TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." +TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." 
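The "register the ones you want to use" step mentioned in the previewers entry above is done against the external tools admin API; a minimal sketch (manifest filename and host are placeholders):

```bash
# register a tool from its JSON manifest
curl -X POST -H 'Content-type: application/json' \
     --upload-file your-tool-manifest.json \
     http://localhost:8080/api/admin/externalTools
# list the tools currently registered
curl http://localhost:8080/api/admin/externalTools
```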
From 905c8cf906857feb2e7231f31c1a2e224b33d26b Mon Sep 17 00:00:00 2001 From: raravumich <48064835+raravumich@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:36:27 -0500 Subject: [PATCH 0616/1112] added correct tabs --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 05263498977..10f9a6a6062 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -5,4 +5,4 @@ Binder explore dataset Binder allows you to spin up custom computing environment File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. -TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." +TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." 
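Because this file is tab-separated, a quick way to confirm an edited row still has the expected number of columns (the point of the tab fixes in this patch and the previous one) is something like:

```bash
# a sketch; run from the repository root
awk -F'\t' '{print NR": "NF" columns"}' doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
```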
From 5760c259ae493ce3670eefcd850480e5106133ef Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 5 Feb 2024 15:11:55 -0500 Subject: [PATCH 0617/1112] fix formatting #10279 --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 10f9a6a6062..c22392a7c5e 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -5,4 +5,4 @@ Binder explore dataset Binder allows you to spin up custom computing environment File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. -TurboCurator by ICPSR configure dataset "TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI’s ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation." +TurboCurator by ICPSR configure dataset TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI's ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation. 
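If you need to hunt for the kind of stray smart quotes this patch cleans up, a simple search over the same file works; a sketch:

```bash
grep -n -e '’' -e '“' -e '”' doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
```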
From a92560059ecd18a081a063a08f4c5a998fb1e3d4 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 5 Feb 2024 19:33:33 -0500 Subject: [PATCH 0618/1112] Fix to provide latest version metadata --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ea74368d110..e3505cbbb33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -778,7 +778,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @ @Path("{id}/metadata") @Produces("application/ld+json, application/json-ld") public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getVersionJsonLDMetadata(crc, id, DS_VERSION_DRAFT, uriInfo, headers); + return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers); } @PUT diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 125753296a2..cd292a40a1e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -1202,7 +1202,7 @@ public void testGeospatialSearch() { .add("value", "42.33661") .add("typeClass", "primitive") .add("multiple", false) - .add("typeName", "southLongitude") + .add("typeName", "southLongitud e") ) .add("eastLongitude", Json.createObjectBuilder() From f4b94837a8dbfa1f657ab31ba66a83c5abd4d5e7 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 6 Feb 2024 12:32:57 +0000 Subject: [PATCH 0619/1112] Stash: getFileData by datasetVersionId param WIP --- .../edu/harvard/iq/dataverse/api/Files.java | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 5d400ee1438..6a9b1803583 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,7 +4,6 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -13,7 +12,6 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.FileDownloadServiceBean; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; @@ -51,6 +49,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.URLTokenUtil; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -81,7 +80,6 @@ import static 
edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDT; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; -import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import jakarta.ws.rs.core.UriInfo; import org.glassfish.jersey.media.multipart.FormDataBodyPart; @@ -500,22 +498,22 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa .type(MediaType.TEXT_PLAIN) //Our plain text string is already json .build(); } - + @GET @AuthRequired - @Path("{id}/draft") - public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true); + @Path("{id}") + public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, null); } - + @GET @AuthRequired - @Path("{id}") - public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false); + @Path("{id}/{datasetVersionId}") + public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, datasetVersionId); } - - private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft ){ + + private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, String datasetVersionId){ DataverseRequest req; try { @@ -532,7 +530,7 @@ private Response getFileDataResponse(User user, String fileIdOrPersistentId, Uri FileMetadata fm; - if (draft) { + if (datasetVersionId.equals(DS_VERSION_DRAFT)) { try { fm = execCommand(new GetDraftFileMetadataIfAvailableCommand(req, df)); } catch (WrappedResponse w) { @@ -547,7 +545,7 @@ private Response getFileDataResponse(User user, String fileIdOrPersistentId, Uri try { fm = df.getLatestPublishedFileMetadata(); - + } catch (UnsupportedOperationException e) { try { fm = execCommand(new GetDraftFileMetadataIfAvailableCommand(req, df)); From ae9b74fd4592103e1c8135655d312bb7ef0c24d7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 6 Feb 2024 09:27:09 -0500 Subject: [PATCH 0620/1112] #10229 fix popup list --- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 943a74327d5..3dbc22902b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -362,7 +362,7 @@ public void initFeaturedDataverses() { List featuredSource = new ArrayList<>(); List featuredTarget = new ArrayList<>(); 
featuredSource.addAll(dataverseService.findAllPublishedByOwnerId(dataverse.getId())); - featuredSource.addAll(linkingService.findLinkingDataverses(dataverse.getId())); + featuredSource.addAll(linkingService.findLinkedDataverses(dataverse.getId())); List featuredList = featuredDataverseService.findByDataverseId(dataverse.getId()); for (DataverseFeaturedDataverse dfd : featuredList) { Dataverse fd = dfd.getFeaturedDataverse(); From 4309ab06308f1be2333dcf40bc0bda3c11022437 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 6 Feb 2024 09:34:01 -0500 Subject: [PATCH 0621/1112] #10229 add to error message --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 157f2ecaf54..f1c8381816c 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -875,7 +875,7 @@ dataverse.option.deleteDataverse=Delete Dataverse dataverse.publish.btn=Publish dataverse.publish.header=Publish Dataverse dataverse.nopublished=No Published Dataverses -dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. +dataverse.nopublished.tip=In order to use this feature you must have at least one published or linked dataverse. dataverse.contact=Email Dataverse Contact dataverse.link=Link Dataverse dataverse.link.btn.tip=Link to Your Dataverse From 2f7ce01fd67539a9213d87884dc229e689a055da Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 6 Feb 2024 10:38:44 -0500 Subject: [PATCH 0622/1112] Add to DatasetsIT testSemanticMetadataAPIs test cases for published and draft --- .../harvard/iq/dataverse/api/DatasetsIT.java | 60 +++++++++++++++++-- 1 file changed, 56 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6e6855306e4..e1c4b901116 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3013,6 +3013,46 @@ public void testSemanticMetadataAPIs() { response = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, badTerms, false); response.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + //We publish the dataset and dataverse + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + //We check the version is published + response = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken); + response.prettyPrint(); + jsonLDString = getData(response.getBody().asString()); + jsonLDObject = JSONLDUtil.decontextualizeJsonLD(jsonLDString); + String publishedVersion = jsonLDObject.getString("http://schema.org/version"); + assertNotEquals("DRAFT", publishedVersion); + + // Upload a file so a draft version is created + String pathToFile = "src/main/webapp/resources/images/cc0.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + int fileID = uploadResponse.jsonPath().getInt("data.files[0].dataFile.id"); + + //We check the authenticated user gets DRAFT + response = UtilIT.getDatasetJsonLDMetadata(datasetId, 
apiToken); + response.prettyPrint(); + jsonLDString = getData(response.getBody().asString()); + jsonLDObject = JSONLDUtil.decontextualizeJsonLD(jsonLDString); + assertEquals("DRAFT", jsonLDObject.getString("http://schema.org/version")); + + // Create user with no permission and check they get published version + String apiTokenNoPerms = UtilIT.createRandomUserGetToken(); + response = UtilIT.getDatasetJsonLDMetadata(datasetId, apiTokenNoPerms); + response.prettyPrint(); + jsonLDString = getData(response.getBody().asString()); + jsonLDObject = JSONLDUtil.decontextualizeJsonLD(jsonLDString); + assertNotEquals("DRAFT", jsonLDObject.getString("http://schema.org/version")); + + // Delete the file + Response deleteFileResponse = UtilIT.deleteFileInDataset(fileID, apiToken); + deleteFileResponse.prettyPrint(); + deleteFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + // Delete the terms of use response = UtilIT.deleteDatasetJsonLDMetadata(datasetId, apiToken, "{\"https://dataverse.org/schema/core#termsOfUse\": \"New terms\"}"); @@ -3026,15 +3066,27 @@ public void testSemanticMetadataAPIs() { jsonLDObject = JSONLDUtil.decontextualizeJsonLD(jsonLDString); assertTrue(!jsonLDObject.containsKey("https://dataverse.org/schema/core#termsOfUse")); - // Cleanup - delete dataset, dataverse, user... - Response deleteDatasetResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, apiToken); - deleteDatasetResponse.prettyPrint(); - assertEquals(200, deleteDatasetResponse.getStatusCode()); + //Delete the DRAFT dataset + Response deleteDraftResponse = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, DS_VERSION_DRAFT, apiToken); + deleteDraftResponse.prettyPrint(); + deleteDraftResponse.then().assertThat().statusCode(OK.getStatusCode()); + + //We set the user as superuser so we can delete the published dataset + Response superUserResponse = UtilIT.makeSuperUser(username); + superUserResponse.prettyPrint(); + deleteDraftResponse.then().assertThat().statusCode(OK.getStatusCode()); + + //Delete the published dataset + Response deletePublishedResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, apiToken); + deletePublishedResponse.prettyPrint(); + deleteDraftResponse.then().assertThat().statusCode(OK.getStatusCode()); + //Delete the dataverse Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); deleteDataverseResponse.prettyPrint(); assertEquals(200, deleteDataverseResponse.getStatusCode()); + //Delete the user Response deleteUserResponse = UtilIT.deleteUser(username); deleteUserResponse.prettyPrint(); assertEquals(200, deleteUserResponse.getStatusCode()); From 9568c20359234bbe87b17656c91926ab11329a57 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 6 Feb 2024 10:53:24 -0500 Subject: [PATCH 0623/1112] Add release notes --- doc/release-notes/10297-metadata-api-fix.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10297-metadata-api-fix.md diff --git a/doc/release-notes/10297-metadata-api-fix.md b/doc/release-notes/10297-metadata-api-fix.md new file mode 100644 index 00000000000..11ee086af04 --- /dev/null +++ b/doc/release-notes/10297-metadata-api-fix.md @@ -0,0 +1 @@ +The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. 
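A minimal sketch of the behavior described in this release note (and exercised by the test changes above); the server URL, dataset id, and token are placeholders:

```bash
export SERVER_URL=http://localhost:8080
export DATASET_ID=42

# a caller who can see the draft gets the latest version, which may be a DRAFT
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$DATASET_ID/metadata"

# a caller without that permission falls back to the latest published version
curl "$SERVER_URL/api/datasets/$DATASET_ID/metadata"
```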
From 2f167cf57def265d719f52a7211ed6648b7e3df8 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 6 Feb 2024 10:56:03 -0500 Subject: [PATCH 0624/1112] Restore SearchIT --- src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index cd292a40a1e..125753296a2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -1202,7 +1202,7 @@ public void testGeospatialSearch() { .add("value", "42.33661") .add("typeClass", "primitive") .add("multiple", false) - .add("typeName", "southLongitud e") + .add("typeName", "southLongitude") ) .add("eastLongitude", Json.createObjectBuilder() From df4f49a1650070427a710046be32b7c5f6ad5312 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 6 Feb 2024 14:43:38 -0500 Subject: [PATCH 0625/1112] add release note #10238 --- doc/release-notes/10238-container-demo.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10238-container-demo.md diff --git a/doc/release-notes/10238-container-demo.md b/doc/release-notes/10238-container-demo.md new file mode 100644 index 00000000000..edc4db4b650 --- /dev/null +++ b/doc/release-notes/10238-container-demo.md @@ -0,0 +1 @@ +The Container Guide now contains a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container From ce4b1e0418b31a9a4db9fa7ab1926f17459a046c Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 6 Feb 2024 14:50:35 -0500 Subject: [PATCH 0626/1112] Change the workflow section including feedback from @sekmiller --- doc/sphinx-guides/source/qa/overview.md | 27 ++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index 64796357831..a5b613f6516 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -11,11 +11,28 @@ This guide describes the testing process used by QA at IQSS and provides a refer ## Workflow -The basic workflow is as follows. Bugs or feature requests are submitted to GitHub by the community or by team members as [issues](https://github.com/IQSS/dataverse/issues). These issues are prioritized and added to a two-week sprint that is reflected on the GitHub {ref}`kanban-board`. As developers work on these issues, a GitHub branch is produced, code is contributed, and a pull request is made to merge these new changes back into the common {ref}`develop branch ` and ultimately released as part of the product. - -Before a pull request is moved to QA, it must be reviewed by a member of the development team from a coding perspective, and it must pass automated tests. There it is tested manually, exercising the UI (using three common browsers) and any business logic it implements. - -Depending on whether the code modifies existing code or is completely new, a smoke test of core functionality is performed and some basic regression testing of modified or related code is performed. Any documentation provided is used to understand the feature and any assertions made in that documentation are tested.
Once this passes and any bugs that are found are corrected, and the automated tests are confirmed to be passing, the PR is merged into the develop branch, the PR is closed, and the branch is deleted (if it is local). At this point, the PR moves from the QA column automatically into the Merged column (where it might be discussed at the next standup) and the process repeats with the next PR until it is decided to {doc}`make a release `. +Here is a brief description of our workflow: + +### Issue Submission and Prioritization: +- Members of the community or the development team submit bugs or request features through GitHub as [Issues](https://github.com/IQSS/dataverse/issues)sues. +- These Issues are prioritized and added to a two-week-long sprint that can be tracked on the {ref}`kanban-board`. + +### Development Process: +- Developers will work on a solution on a separate branch +- Once a developer completes their work, they submit a [Pull Request](https://github.com/IQSS/dataverse/pulls) (PR). +- The PR is reviewed by a developer from the team. +- During the review, the reviewer may suggest coding or documentation changes to the original developer. + +### Quality Assurance (QA) Testing: +- The QA tester performs a smoke test of core functionality and regression testing. +- Documentation is used to understand the feature and validate any assertions made. +- If no documentation is provided in the PR, the tester may refer to the original bug report to determine the desired outcome of the changes. +- Once the branch is assumed to be safe, it is merged into the develop branch. + +### Final Steps: +- The PR and the Issue are closed and assigned the “merged” status. +- It is good practice to delete the branch if it is local. +- The content from the PR becomes part of the codebase for {doc}`future releases `. The complete suggested workflow can be found at {doc}`qa-workflow`. From de3bad6e6ec000f182c9a50e019f155cb0c20fb9 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 6 Feb 2024 14:53:05 -0500 Subject: [PATCH 0627/1112] Typo correction --- doc/sphinx-guides/source/qa/overview.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/qa/overview.md b/doc/sphinx-guides/source/qa/overview.md index a5b613f6516..60e6a28ee9a 100644 --- a/doc/sphinx-guides/source/qa/overview.md +++ b/doc/sphinx-guides/source/qa/overview.md @@ -14,7 +14,7 @@ This guide describes the testing process used by QA at IQSS and provides a refer Here is a brief description of our workflow: ### Issue Submission and Prioritization: -- Members of the community or the development team submit bugs or request features through GitHub as [Issues](https://github.com/IQSS/dataverse/issues)sues. +- Members of the community or the development team submit bugs or request features through GitHub as [Issues](https://github.com/IQSS/dataverse/issues). - These Issues are prioritized and added to a two-week-long sprint that can be tracked on the {ref}`kanban-board`. 
### Development Process: From bf3c2c7100b996e4a1a5d4e4616c99c880b9674a Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 7 Feb 2024 12:13:55 +0000 Subject: [PATCH 0628/1112] Fixed: if condition and endpoint path --- .../java/edu/harvard/iq/dataverse/api/Files.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 6a9b1803583..95117162094 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -508,7 +508,7 @@ public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id @GET @AuthRequired - @Path("{id}/{datasetVersionId}") + @Path("{id}/versions/{datasetVersionId}") public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, datasetVersionId); } @@ -530,7 +530,7 @@ private Response getFileDataResponse(User user, String fileIdOrPersistentId, Uri FileMetadata fm; - if (datasetVersionId.equals(DS_VERSION_DRAFT)) { + if (datasetVersionId != null && datasetVersionId.equals(DS_VERSION_DRAFT)) { try { fm = execCommand(new GetDraftFileMetadataIfAvailableCommand(req, df)); } catch (WrappedResponse w) { @@ -558,19 +558,19 @@ private Response getFileDataResponse(User user, String fileIdOrPersistentId, Uri } } - + if (fm.getDatasetVersion().isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, df); mdcLogService.logEntry(entry); - } - + } + return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) .add("data", json(fm)).build()) .type(MediaType.APPLICATION_JSON) .build(); } - + @GET @AuthRequired @Path("{id}/metadata") From c8f8227ffa2efe79d6840fe7641a7c23690caea8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 7 Feb 2024 13:11:00 +0000 Subject: [PATCH 0629/1112] Added: new commands for getting FileMetadata --- ...etDraftFileMetadataIfAvailableCommand.java | 19 ++++---- ...etLatestAccessibleFileMetadataCommand.java | 35 +++++++++++++++ ...GetLatestPublishedFileMetadataCommand.java | 28 ++++++++++++ ...edFileMetadataByDatasetVersionCommand.java | 43 +++++++++++++++++++ 4 files changed, 114 insertions(+), 11 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java index 14999548b34..4673f45412a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; -import 
edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.authorization.Permission; @@ -12,25 +11,23 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Matthew */ -@RequiredPermissions( Permission.ViewUnpublishedDataset ) -public class GetDraftFileMetadataIfAvailableCommand extends AbstractCommand{ - private final DataFile df; +@RequiredPermissions(Permission.ViewUnpublishedDataset) +public class GetDraftFileMetadataIfAvailableCommand extends AbstractCommand { + private final DataFile dataFile; public GetDraftFileMetadataIfAvailableCommand(DataverseRequest aRequest, DataFile dataFile) { super(aRequest, dataFile); - df = dataFile; + this.dataFile = dataFile; } @Override public FileMetadata execute(CommandContext ctxt) throws CommandException { - FileMetadata fm = df.getLatestFileMetadata(); - if(fm.getDatasetVersion().getVersionState().equals(DatasetVersion.VersionState.DRAFT)) { - return df.getLatestFileMetadata(); - } + FileMetadata latestFileMetadata = dataFile.getLatestFileMetadata(); + if (latestFileMetadata.getDatasetVersion().isDraft()) { + return latestFileMetadata; + } return null; } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java new file mode 100644 index 00000000000..306221ed86c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java @@ -0,0 +1,35 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +@RequiredPermissions(Permission.ViewUnpublishedDataset) +public class GetLatestAccessibleFileMetadataCommand extends AbstractCommand { + private final DataFile dataFile; + + public GetLatestAccessibleFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile) { + super(aRequest, dataFile); + this.dataFile = dataFile; + } + + @Override + public FileMetadata execute(CommandContext ctxt) throws CommandException { + FileMetadata fileMetadata = ctxt.engine().submit( + new GetLatestPublishedFileMetadataCommand(getRequest(), dataFile) + ); + + if (fileMetadata == null) { + fileMetadata = ctxt.engine().submit( + new GetDraftFileMetadataIfAvailableCommand(getRequest(), dataFile) + ); + } + + return fileMetadata; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java new file mode 100644 index 00000000000..147a0fdce76 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java @@ -0,0 +1,28 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileMetadata; +import 
edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +@RequiredPermissions({}) +public class GetLatestPublishedFileMetadataCommand extends AbstractCommand { + private final DataFile dataFile; + + public GetLatestPublishedFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile) { + super(aRequest, dataFile); + this.dataFile = dataFile; + } + + @Override + public FileMetadata execute(CommandContext ctxt) throws CommandException { + try { + return dataFile.getLatestPublishedFileMetadata(); + } catch (UnsupportedOperationException e) { + return null; + } + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java new file mode 100644 index 00000000000..564b81d62ac --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java @@ -0,0 +1,43 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.List; + +@RequiredPermissions({}) +public class GetSpecificPublishedFileMetadataByDatasetVersionCommand extends AbstractCommand { + private final long majorVersion; + private final long minorVersion; + private final DataFile dataFile; + + public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersionNum, long minorVersionNum) { + super(aRequest, dataFile); + this.dataFile = dataFile; + majorVersion = majorVersionNum; + minorVersion = minorVersionNum; + } + + @Override + public FileMetadata execute(CommandContext ctxt) throws CommandException { + List fileMetadatas = dataFile.getFileMetadatas(); + + for (FileMetadata fileMetadata : fileMetadatas) { + DatasetVersion datasetVersion = fileMetadata.getDatasetVersion(); + + if (datasetVersion.isPublished() && + datasetVersion.getVersionNumber().equals(majorVersion) && + datasetVersion.getMinorVersionNumber().equals(minorVersion)) { + return fileMetadata; + } + } + + return null; + } +} From 8cda7ce79278797767664291ec3d4175a11a14f9 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 7 Feb 2024 13:13:33 +0000 Subject: [PATCH 0630/1112] Added: readability minor change --- ...pecificPublishedFileMetadataByDatasetVersionCommand.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java index 564b81d62ac..84a51f6b31d 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java @@ -17,11 +17,11 @@ public class GetSpecificPublishedFileMetadataByDatasetVersionCommand extends Abs private final long minorVersion; private final DataFile dataFile; - public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersionNum, long minorVersionNum) { + public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersion, long minorVersion) { super(aRequest, dataFile); this.dataFile = dataFile; - majorVersion = majorVersionNum; - minorVersion = minorVersionNum; + this.majorVersion = majorVersion; + this.minorVersion = minorVersion; } @Override From 99555017b3890c3d7c8c244ee5bae92f7ec962da Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 7 Feb 2024 10:16:32 -0500 Subject: [PATCH 0631/1112] remove superflous double quotes #10240 --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f161dd67ca9..d6f88df3235 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3536,7 +3536,7 @@ When the dataset version is a draft or deaccessioned, authentication is required export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export FILE_ID=42 - export DATASET_VERSION=":draft" + export DATASET_VERSION=:draft curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation" From 316524a45e85ca359357b568774dd1493e913e2e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 7 Feb 2024 10:26:48 -0500 Subject: [PATCH 0632/1112] move English to bundle #10240 --- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 4 ++-- src/main/java/propertyFiles/Bundle.properties | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index ed331e6835d..f7cdf2df10b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -980,13 +980,13 @@ public Command handleLatestPublished() { })); if (dsv == null) { - return unauthorized("Dataset version cannot be found or unauthorized."); + return unauthorized(BundleUtil.getStringFromBundle("files.api.no.draftOrUnauth")); } Long getDatasetVersionID = dsv.getId(); FileMetadata fm = dataFileServiceBean.findFileMetadataByDatasetVersionIdAndDataFileId(getDatasetVersionID, df.getId()); if (fm == null) { - return notFound("File could not be found."); + return notFound(BundleUtil.getStringFromBundle("files.api.fileNotFound")); } boolean direct = false; DataCitation citation = new DataCitation(fm, direct); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 157f2ecaf54..5ecab876e01 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2633,7 +2633,9 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. 
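Because the citation endpoint takes the dataset version as a path segment, any HTTP client can drive it the same way the curl example above does. A small java.net.http sketch; the server URL, API token and file id are placeholders:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class FileCitationClientSketch {
        public static void main(String[] args) throws Exception {
            String serverUrl = "https://demo.dataverse.org";          // placeholder
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"; // placeholder
            long fileId = 42;                                         // placeholder
            String version = ":draft"; // drafts and deaccessioned versions require the token

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create(serverUrl + "/api/files/" + fileId + "/versions/" + version + "/citation"))
                    .header("X-Dataverse-key", apiToken)
                    .GET()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // The citation string comes back as data.message in the usual JSON envelope.
            System.out.println(response.body());
        }
    }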
#Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file +files.api.no.draftOrUnauth=Dataset version cannot be found or unauthorized. files.api.only.tabular.supported=This operation is only available for tabular files. +files.api.fileNotFound=File could not be found. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. From e9ab40bdaadb9246a8d191ff6c76ba6dc16762e5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 7 Feb 2024 10:44:16 -0500 Subject: [PATCH 0633/1112] simplify and rename tests #10240 --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 33 ++++++------------- 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 1e8a806faa2..53f8aa40e4a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2487,7 +2487,7 @@ public void testCollectionStorageQuotas() { } @Test - public void testFileCitation() throws IOException { + public void testFileCitationByVersion() throws IOException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -2502,24 +2502,11 @@ public void testFileCitation() throws IOException { Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, false, apiToken); - getDatasetVersionCitationResponse.prettyPrint(); - getDatasetVersionCitationResponse.then().assertThat() - .statusCode(OK.getStatusCode()) - // We check that the returned message contains information expected for the citation string - .body("data.message", containsString("DRAFT VERSION")); - - Path pathToTxt = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "file.txt"); - String contentOfTxt = "foobar"; - java.nio.file.Files.write(pathToTxt, contentOfTxt.getBytes()); - - Response uploadFileTxt = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTxt.toString(), apiToken); - uploadFileTxt.prettyPrint(); - uploadFileTxt.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data.files[0].label", equalTo("file.txt")); + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadFile.then().assertThat().statusCode(OK.getStatusCode()); - Integer fileId = JsonPath.from(uploadFileTxt.body().asString()).getInt("data.files[0].dataFile.id"); + Integer fileId = JsonPath.from(uploadFile.body().asString()).getInt("data.files[0].dataFile.id"); String pidAsUrl = "https://doi.org/" + datasetPid.split("doi:")[1]; int currentYear = Year.now().getValue(); @@ -2540,7 +2527,7 @@ public void testFileCitation() throws IOException { getFileCitationDraft.prettyPrint(); getFileCitationDraft.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; file.txt [fileName]")); + 
.body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; coffeeshop.png [fileName]")); Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -2556,7 +2543,7 @@ public void testFileCitation() throws IOException { String updateJsonString = """ { - "label": "foo.txt" + "label": "foo.png" } """; @@ -2568,13 +2555,13 @@ public void testFileCitation() throws IOException { getFileCitationPostV1Draft.prettyPrint(); getFileCitationPostV1Draft.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; foo.txt [fileName]")); + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, DRAFT VERSION; foo.png [fileName]")); Response getFileCitationV1OldFilename = UtilIT.getFileCitation(fileId, "1.0", apiToken); getFileCitationV1OldFilename.prettyPrint(); getFileCitationV1OldFilename.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1; file.txt [fileName]")); + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1; coffeeshop.png [fileName]")); UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken) .then().assertThat().statusCode(OK.getStatusCode()); @@ -2587,7 +2574,7 @@ public void testFileCitation() throws IOException { getFileCitationV1PostDeaccessionAuthor.prettyPrint(); getFileCitationV1PostDeaccessionAuthor.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1, DEACCESSIONED VERSION; file.txt [fileName]")); + .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1, DEACCESSIONED VERSION; coffeeshop.png [fileName]")); Response getFileCitationV1PostDeaccessionNoApiToken = UtilIT.getFileCitation(fileId, "1.0", null); getFileCitationV1PostDeaccessionNoApiToken.prettyPrint(); From a2194d9ce6229072a19f1ccd5a13a70e6546f447 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 7 Feb 2024 10:48:37 -0500 Subject: [PATCH 0634/1112] stop using var; make consistent with older method #10240 --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index b5957a756d3..b51d6af75a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3475,11 +3475,11 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, boo } static Response getFileCitation(Integer fileId, String datasetVersion, String apiToken) { - var spec = given(); + RequestSpecification requestSpecification = given(); if (apiToken != null) { - spec.header(API_TOKEN_HTTP_HEADER, apiToken); + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); } - return spec.get("/api/files/" + fileId + "/versions/" + datasetVersion + "/citation"); + return requestSpecification.get("/api/files/" + fileId + "/versions/" + datasetVersion + "/citation"); } static 
Response getVersionFiles(Integer datasetId, From bec394519826529c02adedfdd601f04b45f859c2 Mon Sep 17 00:00:00 2001 From: landreev Date: Wed, 7 Feb 2024 11:50:52 -0500 Subject: [PATCH 0635/1112] 8524 adding mechanism for storing tab. files with variable headers (#10282) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * "stored with header" flag #8524 * more changes for the streaming and redirect code. #8524 * disabling dynamically-generated varheader in the remaining storage drivers. #8524 * Ingest plugins (work in progress) #8524 * R ingest plugin (#8524) * still some unaddressed @todo:s, but the branch should build and the unit tests should be passing. # 8524 * work-in-progress, on the subsetting code in the download instance writer. #8524 * more work-in-progress changes. removing all the unused code from TabularSubsetGenerator, for clarity etc. #8524 * more bits and pieces #8524 * 2 more ingest plugins. #8542 * Integration tests. #8524 * typo #8524 * documenting the new setting. #8524 * a release note for the pr. also, added the "storage quotas enabled" to the list of settings documented in the config guide while I was at it. #8524 * removed all the unused code from this class (lots of it) for clarity, etc. git history can be consulted if anyone is curious about what we used to do here. #8524 * removing @todo: that's no longer relevant #8524 * (cosmetic) defined the control constants used in the integration test. #8524 --- ...4-storing-tabular-files-with-varheaders.md | 6 + .../source/installation/config.rst | 22 + .../edu/harvard/iq/dataverse/DataTable.java | 18 + .../dataverse/api/DownloadInstanceWriter.java | 78 +- .../harvard/iq/dataverse/api/TestIngest.java | 2 +- .../iq/dataverse/dataaccess/FileAccessIO.java | 3 +- .../dataaccess/GlobusOverlayAccessIO.java | 8 +- .../dataaccess/RemoteOverlayAccessIO.java | 8 +- .../iq/dataverse/dataaccess/S3AccessIO.java | 3 +- .../dataverse/dataaccess/SwiftAccessIO.java | 3 +- .../dataaccess/TabularSubsetGenerator.java | 1150 +---------------- .../dataaccess/TabularSubsetInputStream.java | 114 -- .../export/DDIExportServiceBean.java | 11 + .../dataverse/ingest/IngestServiceBean.java | 64 +- .../tabulardata/TabularDataFileReader.java | 26 +- .../impl/plugins/csv/CSVFileReader.java | 24 +- .../impl/plugins/dta/DTAFileReader.java | 11 +- .../impl/plugins/dta/NewDTAFileReader.java | 19 +- .../impl/plugins/por/PORFileReader.java | 13 +- .../impl/plugins/rdata/RDATAFileReader.java | 4 +- .../impl/plugins/rdata/RTabFileParser.java | 28 +- .../impl/plugins/sav/SAVFileReader.java | 24 +- .../impl/plugins/xlsx/XLSXFileReader.java | 11 +- .../settings/SettingsServiceBean.java | 7 +- .../iq/dataverse/util/SystemConfig.java | 8 + ...24-store-tabular-files-with-varheaders.sql | 1 + .../edu/harvard/iq/dataverse/api/FilesIT.java | 128 ++ .../dataverse/ingest/IngestFrequencyTest.java | 2 +- .../impl/plugins/csv/CSVFileReaderTest.java | 24 +- .../impl/plugins/dta/DTAFileReaderTest.java | 2 +- .../plugins/dta/NewDTAFileReaderTest.java | 14 +- 31 files changed, 501 insertions(+), 1335 deletions(-) create mode 100644 doc/release-notes/8524-storing-tabular-files-with-varheaders.md delete mode 100644 src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetInputStream.java create mode 100644 src/main/resources/db/migration/V6.1.0.2__8524-store-tabular-files-with-varheaders.sql diff --git a/doc/release-notes/8524-storing-tabular-files-with-varheaders.md b/doc/release-notes/8524-storing-tabular-files-with-varheaders.md new 
file mode 100644 index 00000000000..f7034c846f6 --- /dev/null +++ b/doc/release-notes/8524-storing-tabular-files-with-varheaders.md @@ -0,0 +1,6 @@ +Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. As the most significant effect of this feature, +Access API will be able to take advantage of Direct Download for tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. + +This behavior is controlled by the new setting `:StoreIngestedTabularFilesWithVarHeaders`. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. + +An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. \ No newline at end of file diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a7d7905ca4a..c233e594fa7 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -4151,3 +4151,25 @@ A true/false (default) option determining whether the dataset datafile table dis .. _supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html + +.. _:UseStorageQuotas: + +:UseStorageQuotas ++++++++++++++++++ + +Enables storage use quotas in collections. See the :doc:`/api/native-api` for details. + + +.. _:StoreIngestedTabularFilesWithVarHeaders: + +:StoreIngestedTabularFilesWithVarHeaders +++++++++++++++++++++++++++++++++++++++++ + +With this setting enabled, tabular files produced during Ingest will +be stored with the list of variable names added as the first +tab-delimited line. As the most significant effect of this feature, +Access API will be able to take advantage of Direct Download for +tab. files saved with these headers on S3 - since they no longer have +to be generated and added to the streamed file on the fly. + +The setting is ``false`` by default, preserving the legacy behavior. diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java index a17d8c65138..95f3aed0f40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java @@ -112,6 +112,16 @@ public DataTable() { @Column( nullable = true ) private String originalFileName; + + /** + * The physical tab-delimited file is in storage with the list of variable + * names saved as the 1st line. This means that we do not need to generate + * this line on the fly. (Also means that direct download mechanism can be + * used for this file!) 
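The new behavior is off by default; an installation that wants it turns on the :StoreIngestedTabularFilesWithVarHeaders database setting described in the release note and config guide above. A minimal sketch of doing that over HTTP, assuming the standard admin settings endpoint (normally reachable only from localhost) rather than any API specific to this feature:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class EnableVarHeaderStorageSketch {
        public static void main(String[] args) throws Exception {
            // PUT the value "true" to the settings endpoint; the base URL is a placeholder.
            HttpRequest put = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/api/admin/settings/:StoreIngestedTabularFilesWithVarHeaders"))
                    .PUT(HttpRequest.BodyPublishers.ofString("true"))
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(put, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }

Files ingested after the setting is enabled would carry the storedWithVariableHeader flag declared just below; files ingested earlier keep the legacy layout and continue to have the header generated on the fly.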
+ */ + @Column(nullable = false) + private boolean storedWithVariableHeader = false; + /* * Getter and Setter methods: */ @@ -206,6 +216,14 @@ public void setOriginalFileName(String originalFileName) { this.originalFileName = originalFileName; } + public boolean isStoredWithVariableHeader() { + return storedWithVariableHeader; + } + + public void setStoredWithVariableHeader(boolean storedWithVariableHeader) { + this.storedWithVariableHeader = storedWithVariableHeader; + } + /* * Custom overrides for hashCode(), equals() and toString() methods: */ diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index bcb8799ec9e..89b22b76a7d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -22,7 +22,6 @@ import jakarta.ws.rs.ext.Provider; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.dataaccess.*; import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.engine.command.Command; @@ -104,8 +103,10 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] String auxiliaryTag = null; String auxiliaryType = null; String auxiliaryFileName = null; + // Before we do anything else, check if this download can be handled // by a redirect to remote storage (only supported on S3, as of 5.4): + if (storageIO.downloadRedirectEnabled()) { // Even if the above is true, there are a few cases where a @@ -159,7 +160,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] } } else if (dataFile.isTabularData()) { - // Many separate special cases here. + // Many separate special cases here. if (di.getConversionParam() != null) { if (di.getConversionParam().equals("format")) { @@ -180,12 +181,26 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] redirectSupported = false; } } - } else if (!di.getConversionParam().equals("noVarHeader")) { - // This is a subset request - can't do. + } else if (di.getConversionParam().equals("noVarHeader")) { + // This will work just fine, if the tab. file is + // stored without the var. header. Throw "unavailable" + // exception otherwise. + // @todo: should we actually drop support for this "noVarHeader" flag? + if (dataFile.getDataTable().isStoredWithVariableHeader()) { + throw new ServiceUnavailableException(); + } + // ... defaults to redirectSupported = true + } else { + // This must be a subset request then - can't do. + redirectSupported = false; + } + } else { + // "straight" download of the full tab-delimited file. + // can redirect, but only if stored with the variable + // header already added: + if (!dataFile.getDataTable().isStoredWithVariableHeader()) { redirectSupported = false; } - } else { - redirectSupported = false; } } } @@ -247,11 +262,16 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // finally, issue the redirect: Response response = Response.seeOther(redirect_uri).build(); logger.fine("Issuing redirect to the file location."); + // Yes, this throws an exception. It's not an exception + // as in, "bummer, something went wrong". This is how a + // redirect is produced here! 
throw new RedirectionException(response); } throw new ServiceUnavailableException(); } + // Past this point, this is a locally served/streamed download + if (di.getConversionParam() != null) { // Image Thumbnail and Tabular data conversion: // NOTE: only supported on local files, as of 4.0.2! @@ -285,9 +305,14 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // request any tabular-specific services. if (di.getConversionParam().equals("noVarHeader")) { - logger.fine("tabular data with no var header requested"); - storageIO.setNoVarHeader(Boolean.TRUE); - storageIO.setVarHeader(null); + if (!dataFile.getDataTable().isStoredWithVariableHeader()) { + logger.fine("tabular data with no var header requested"); + storageIO.setNoVarHeader(Boolean.TRUE); + storageIO.setVarHeader(null); + } else { + logger.fine("can't serve request for tabular data without varheader, since stored with it"); + throw new ServiceUnavailableException(); + } } else if (di.getConversionParam().equals("format")) { // Conversions, and downloads of "stored originals" are // now supported on all DataFiles for which StorageIO @@ -329,11 +354,10 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] if (variable.getDataTable().getDataFile().getId().equals(dataFile.getId())) { logger.fine("adding variable id " + variable.getId() + " to the list."); variablePositionIndex.add(variable.getFileOrder()); - if (subsetVariableHeader == null) { - subsetVariableHeader = variable.getName(); - } else { - subsetVariableHeader = subsetVariableHeader.concat("\t"); - subsetVariableHeader = subsetVariableHeader.concat(variable.getName()); + if (!dataFile.getDataTable().isStoredWithVariableHeader()) { + subsetVariableHeader = subsetVariableHeader == null + ? variable.getName() + : subsetVariableHeader.concat("\t" + variable.getName()); } } else { logger.warning("variable does not belong to this data file."); @@ -346,7 +370,17 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] try { File tempSubsetFile = File.createTempFile("tempSubsetFile", ".tmp"); TabularSubsetGenerator tabularSubsetGenerator = new TabularSubsetGenerator(); - tabularSubsetGenerator.subsetFile(storageIO.getInputStream(), tempSubsetFile.getAbsolutePath(), variablePositionIndex, dataFile.getDataTable().getCaseQuantity(), "\t"); + + long numberOfLines = dataFile.getDataTable().getCaseQuantity(); + if (dataFile.getDataTable().isStoredWithVariableHeader()) { + numberOfLines++; + } + + tabularSubsetGenerator.subsetFile(storageIO.getInputStream(), + tempSubsetFile.getAbsolutePath(), + variablePositionIndex, + numberOfLines, + "\t"); if (tempSubsetFile.exists()) { FileInputStream subsetStream = new FileInputStream(tempSubsetFile); @@ -354,8 +388,11 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] InputStreamIO subsetStreamIO = new InputStreamIO(subsetStream, subsetSize); logger.fine("successfully created subset output stream."); - subsetVariableHeader = subsetVariableHeader.concat("\n"); - subsetStreamIO.setVarHeader(subsetVariableHeader); + + if (subsetVariableHeader != null) { + subsetVariableHeader = subsetVariableHeader.concat("\n"); + subsetStreamIO.setVarHeader(subsetVariableHeader); + } String tabularFileName = storageIO.getFileName(); @@ -380,8 +417,13 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] } else { logger.fine("empty list of extra arguments."); } + // end of tab. 
data subset case + } else if (dataFile.getDataTable().isStoredWithVariableHeader()) { + logger.fine("tabular file stored with the var header included, no need to generate it on the fly"); + storageIO.setNoVarHeader(Boolean.TRUE); + storageIO.setVarHeader(null); } - } + } // end of tab. data file case if (storageIO == null) { //throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java index 05ba150df8e..add43ea2091 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java @@ -100,7 +100,7 @@ public String datafile(@QueryParam("fileName") String fileName, @QueryParam("fil TabularDataIngest tabDataIngest = null; try { - tabDataIngest = ingestPlugin.read(fileInputStream, null); + tabDataIngest = ingestPlugin.read(fileInputStream, false, null); } catch (IOException ingestEx) { output = output.concat("Caught an exception trying to ingest file " + fileName + ": " + ingestEx.getLocalizedMessage()); return output; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index f2a1312a150..26637ec5742 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -120,7 +120,8 @@ public void open (DataAccessOption... options) throws IOException { && dataFile.getContentType().equals("text/tab-separated-values") && dataFile.isTabularData() && dataFile.getDataTable() != null - && (!this.noVarHeader())) { + && (!this.noVarHeader()) + && (!dataFile.getDataTable().isStoredWithVariableHeader())) { List datavariables = dataFile.getDataTable().getDataVariables(); String varHeaderLine = generateVariableHeader(datavariables); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 7a6809cb2ff..733daaf1328 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -450,8 +450,12 @@ public void open(DataAccessOption... options) throws IOException { this.setSize(retrieveSizeFromMedia()); } // Only applies for the S3 Connector case (where we could have run an ingest) - if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") - && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + if (dataFile.getContentType() != null + && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() + && dataFile.getDataTable() != null + && (!this.noVarHeader()) + && (!dataFile.getDataTable().isStoredWithVariableHeader())) { List datavariables = dataFile.getDataTable().getDataVariables(); String varHeaderLine = generateVariableHeader(datavariables); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index 1616bfabf96..bca70259cb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -124,8 +124,12 @@ public void open(DataAccessOption... 
options) throws IOException { logger.fine("Setting size"); this.setSize(retrieveSizeFromMedia()); } - if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") - && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + if (dataFile.getContentType() != null + && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() + && dataFile.getDataTable() != null + && (!this.noVarHeader()) + && (!dataFile.getDataTable().isStoredWithVariableHeader())) { List datavariables = dataFile.getDataTable().getDataVariables(); String varHeaderLine = generateVariableHeader(datavariables); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 8afc365417e..c2143bd4789 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -225,7 +225,8 @@ public void open(DataAccessOption... options) throws IOException { && dataFile.getContentType().equals("text/tab-separated-values") && dataFile.isTabularData() && dataFile.getDataTable() != null - && (!this.noVarHeader())) { + && (!this.noVarHeader()) + && (!dataFile.getDataTable().isStoredWithVariableHeader())) { List datavariables = dataFile.getDataTable().getDataVariables(); String varHeaderLine = generateVariableHeader(datavariables); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index 105a60ab418..717f46ffd60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -142,7 +142,8 @@ public void open(DataAccessOption... 
options) throws IOException { && dataFile.getContentType().equals("text/tab-separated-values") && dataFile.isTabularData() && dataFile.getDataTable() != null - && (!this.noVarHeader())) { + && (!this.noVarHeader()) + && (!dataFile.getDataTable().isStoredWithVariableHeader())) { List datavariables = dataFile.getDataTable().getDataVariables(); String varHeaderLine = generateVariableHeader(datavariables); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java index 782f7f3a52d..c369010c8cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java @@ -60,305 +60,26 @@ public class TabularSubsetGenerator implements SubsetGenerator { - private static Logger dbgLog = Logger.getLogger(TabularSubsetGenerator.class.getPackage().getName()); + private static Logger logger = Logger.getLogger(TabularSubsetGenerator.class.getPackage().getName()); - private static int COLUMN_TYPE_STRING = 1; - private static int COLUMN_TYPE_LONG = 2; - private static int COLUMN_TYPE_DOUBLE = 3; - private static int COLUMN_TYPE_FLOAT = 4; - - private static int MAX_COLUMN_BUFFER = 8192; - - private FileChannel fileChannel = null; - - private int varcount; - private int casecount; - private int subsetcount; - - private byte[][] columnEntries = null; - - - private ByteBuffer[] columnByteBuffers; - private int[] columnBufferSizes; - private int[] columnBufferOffsets; - - private long[] columnStartOffsets; - private long[] columnTotalOffsets; - private long[] columnTotalLengths; - - public TabularSubsetGenerator() { - - } - - public TabularSubsetGenerator (DataFile datafile, List variables) throws IOException { - if (!datafile.isTabularData()) { - throw new IOException("DataFile is not tabular data."); - } - - setVarCount(datafile.getDataTable().getVarQuantity().intValue()); - setCaseCount(datafile.getDataTable().getCaseQuantity().intValue()); - - - - StorageIO dataAccess = datafile.getStorageIO(); - if (!dataAccess.isLocalFile()) { - throw new IOException("Subsetting is supported on local files only!"); - } - - //File tabfile = datafile.getFileSystemLocation().toFile(); - File tabfile = dataAccess.getFileSystemPath().toFile(); + //private static int MAX_COLUMN_BUFFER = 8192; - File rotatedImageFile = getRotatedImage(tabfile, getVarCount(), getCaseCount()); - long[] columnEndOffsets = extractColumnOffsets(rotatedImageFile, getVarCount(), getCaseCount()); - - fileChannel = (FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), StandardOpenOption.READ)); - - if (variables == null || variables.size() < 1 || variables.size() > getVarCount()) { - throw new IOException("Illegal number of variables in the subset request"); - } - - subsetcount = variables.size(); - columnTotalOffsets = new long[subsetcount]; - columnTotalLengths = new long[subsetcount]; - columnByteBuffers = new ByteBuffer[subsetcount]; - - + public TabularSubsetGenerator() { - if (subsetcount == 1) { - if (!datafile.getDataTable().getId().equals(variables.get(0).getDataTable().getId())) { - throw new IOException("Variable in the subset request does not belong to the datafile."); - } - dbgLog.fine("single variable subset; setting fileChannel position to "+extractColumnOffset(columnEndOffsets, variables.get(0).getFileOrder())); - fileChannel.position(extractColumnOffset(columnEndOffsets, variables.get(0).getFileOrder())); - 
columnTotalLengths[0] = extractColumnLength(columnEndOffsets, variables.get(0).getFileOrder()); - columnTotalOffsets[0] = 0; - } else { - columnEntries = new byte[subsetcount][]; - - columnBufferSizes = new int[subsetcount]; - columnBufferOffsets = new int[subsetcount]; - columnStartOffsets = new long[subsetcount]; - - int i = 0; - for (DataVariable var : variables) { - if (!datafile.getDataTable().getId().equals(var.getDataTable().getId())) { - throw new IOException("Variable in the subset request does not belong to the datafile."); - } - columnByteBuffers[i] = ByteBuffer.allocate(MAX_COLUMN_BUFFER); - columnTotalLengths[i] = extractColumnLength(columnEndOffsets, var.getFileOrder()); - columnStartOffsets[i] = extractColumnOffset(columnEndOffsets, var.getFileOrder()); - if (columnTotalLengths[i] < MAX_COLUMN_BUFFER) { - columnByteBuffers[i].limit((int)columnTotalLengths[i]); - } - fileChannel.position(columnStartOffsets[i]); - columnBufferSizes[i] = fileChannel.read(columnByteBuffers[i]); - columnBufferOffsets[i] = 0; - columnTotalOffsets[i] = columnBufferSizes[i]; - i++; - } - } - } - - private int getVarCount() { - return varcount; } - private void setVarCount(int varcount) { - this.varcount = varcount; - } - - private int getCaseCount() { - return casecount; - } - - private void setCaseCount(int casecount) { - this.casecount = casecount; - } - - - /* - * Note that this method operates on the *absolute* column number, i.e. - * the number of the physical column in the tabular file. This is stored - * in DataVariable.FileOrder. - * This "column number" should not be confused with the number of column - * in the subset request; a user can request any number of variable - * columns, in an order that doesn't have to follow the physical order - * of the columns in the file. - */ - private long extractColumnOffset(long[] columnEndOffsets, int column) throws IOException { - if (columnEndOffsets == null || columnEndOffsets.length <= column) { - throw new IOException("Offsets table not initialized; or column out of bounds."); - } - long columnOffset; - - if (column > 0) { - columnOffset = columnEndOffsets[column - 1]; - } else { - columnOffset = getVarCount() * 8; - } - return columnOffset; - } - - /* - * See the comment for the method above. + /** + * This class used to be much more complex. There were methods for subsetting + * from fixed-width field files; including using the optimized, "90 deg. rotated" + * versions of such files (i.e. you create a *columns-wise* copy of your data + * file in which the columns are stored sequentially, and a table of byte + * offsets of each column. You can then read individual variable columns + * for cheap; at the expense of doubling the storage size of your tabular + * data files. These methods were not used, so they were deleted (in Jan. 2024 + * prior to 6.2. + * Please consult git history if you are interested in looking at that code. 
*/ - private long extractColumnLength(long[] columnEndOffsets, int column) throws IOException { - if (columnEndOffsets == null || columnEndOffsets.length <= column) { - throw new IOException("Offsets table not initialized; or column out of bounds."); - } - long columnLength; - - if (column > 0) { - columnLength = columnEndOffsets[column] - columnEndOffsets[column - 1]; - } else { - columnLength = columnEndOffsets[0] - varcount * 8; - } - - return columnLength; - } - - - private void bufferMoreColumnBytes(int column) throws IOException { - if (columnTotalOffsets[column] >= columnTotalLengths[column]) { - throw new IOException("attempt to buffer bytes past the column boundary"); - } - fileChannel.position(columnStartOffsets[column] + columnTotalOffsets[column]); - - columnByteBuffers[column].clear(); - if (columnTotalLengths[column] < columnTotalOffsets[column] + MAX_COLUMN_BUFFER) { - dbgLog.fine("Limiting the buffer to "+(columnTotalLengths[column] - columnTotalOffsets[column])+" bytes"); - columnByteBuffers[column].limit((int) (columnTotalLengths[column] - columnTotalOffsets[column])); - } - columnBufferSizes[column] = fileChannel.read(columnByteBuffers[column]); - dbgLog.fine("Read "+columnBufferSizes[column]+" bytes for subset column "+column); - columnBufferOffsets[column] = 0; - columnTotalOffsets[column] += columnBufferSizes[column]; - } - - public byte[] readColumnEntryBytes(int column) { - return readColumnEntryBytes(column, true); - } - - - public byte[] readColumnEntryBytes(int column, boolean addTabs) { - byte[] leftover = null; - byte[] ret = null; - - if (columnBufferOffsets[column] >= columnBufferSizes[column]) { - try { - bufferMoreColumnBytes(column); - if (columnBufferSizes[column] < 1) { - return null; - } - } catch (IOException ioe) { - return null; - } - } - - int byteindex = columnBufferOffsets[column]; - try { - while (columnByteBuffers[column].array()[byteindex] != '\n') { - byteindex++; - if (byteindex == columnBufferSizes[column]) { - // save the leftover: - if (leftover == null) { - leftover = new byte[columnBufferSizes[column] - columnBufferOffsets[column]]; - System.arraycopy(columnByteBuffers[column].array(), columnBufferOffsets[column], leftover, 0, columnBufferSizes[column] - columnBufferOffsets[column]); - } else { - byte[] merged = new byte[leftover.length + columnBufferSizes[column]]; - - System.arraycopy(leftover, 0, merged, 0, leftover.length); - System.arraycopy(columnByteBuffers[column].array(), 0, merged, leftover.length, columnBufferSizes[column]); - leftover = merged; - merged = null; - } - // read more bytes: - bufferMoreColumnBytes(column); - if (columnBufferSizes[column] < 1) { - return null; - } - byteindex = 0; - } - } - - // presumably, we have found our '\n': - if (leftover == null) { - ret = new byte[byteindex - columnBufferOffsets[column] + 1]; - System.arraycopy(columnByteBuffers[column].array(), columnBufferOffsets[column], ret, 0, byteindex - columnBufferOffsets[column] + 1); - } else { - ret = new byte[leftover.length + byteindex + 1]; - System.arraycopy(leftover, 0, ret, 0, leftover.length); - System.arraycopy(columnByteBuffers[column].array(), 0, ret, leftover.length, byteindex + 1); - } - - } catch (IOException ioe) { - return null; - } - - columnBufferOffsets[column] = (byteindex + 1); - - if (column < columnBufferOffsets.length - 1) { - ret[ret.length - 1] = '\t'; - } - return ret; - } - - public int readSingleColumnSubset(byte[] buffer) throws IOException { - if (columnTotalOffsets[0] == columnTotalLengths[0]) { - return -1; - } - 
- if (columnByteBuffers[0] == null) { - dbgLog.fine("allocating single column subset buffer."); - columnByteBuffers[0] = ByteBuffer.allocate(buffer.length); - } - - int bytesread = fileChannel.read(columnByteBuffers[0]); - dbgLog.fine("single column subset: read "+bytesread+" bytes."); - if (columnTotalOffsets[0] + bytesread > columnTotalLengths[0]) { - bytesread = (int)(columnTotalLengths[0] - columnTotalOffsets[0]); - } - System.arraycopy(columnByteBuffers[0].array(), 0, buffer, 0, bytesread); - - columnTotalOffsets[0] += bytesread; - columnByteBuffers[0].clear(); - return bytesread > 0 ? bytesread : -1; - } - - - public byte[] readSubsetLineBytes() throws IOException { - byte[] ret = null; - int total = 0; - for (int i = 0; i < subsetcount; i++) { - columnEntries[i] = readColumnEntryBytes(i); - if (columnEntries[i] == null) { - throw new IOException("Failed to read subset line entry"); - } - total += columnEntries[i].length; - } - - ret = new byte[total]; - int offset = 0; - for (int i = 0; i < subsetcount; i++) { - System.arraycopy(columnEntries[i], 0, ret, offset, columnEntries[i].length); - offset += columnEntries[i].length; - } - dbgLog.fine("line: "+new String(ret)); - return ret; - } - - - public void close() { - if (fileChannel != null) { - try { - fileChannel.close(); - } catch (IOException ioe) { - // don't care. - } - } - } - public void subsetFile(String infile, String outfile, List columns, Long numCases) { subsetFile(infile, outfile, columns, numCases, "\t"); } @@ -411,11 +132,15 @@ public void subsetFile(InputStream in, String outfile, List columns, Lo * files, OK to use on small files: */ - public static Double[] subsetDoubleVector(InputStream in, int column, int numCases) { + public static Double[] subsetDoubleVector(InputStream in, int column, int numCases, boolean skipHeader) { Double[] retVector = new Double[numCases]; try (Scanner scanner = new Scanner(in)) { scanner.useDelimiter("\\n"); + if (skipHeader) { + skipFirstLine(scanner); + } + for (int caseIndex = 0; caseIndex < numCases; caseIndex++) { if (scanner.hasNext()) { String[] line = (scanner.next()).split("\t", -1); @@ -463,11 +188,15 @@ public static Double[] subsetDoubleVector(InputStream in, int column, int numCas * Same deal as with the method above - straightforward, but (potentially) slow. * Not a resource hog though - will only try to store one vector in memory. */ - public static Float[] subsetFloatVector(InputStream in, int column, int numCases) { + public static Float[] subsetFloatVector(InputStream in, int column, int numCases, boolean skipHeader) { Float[] retVector = new Float[numCases]; try (Scanner scanner = new Scanner(in)) { scanner.useDelimiter("\\n"); + if (skipHeader) { + skipFirstLine(scanner); + } + for (int caseIndex = 0; caseIndex < numCases; caseIndex++) { if (scanner.hasNext()) { String[] line = (scanner.next()).split("\t", -1); @@ -513,11 +242,15 @@ public static Float[] subsetFloatVector(InputStream in, int column, int numCases * Same deal as with the method above - straightforward, but (potentially) slow. * Not a resource hog though - will only try to store one vector in memory. 
*/ - public static Long[] subsetLongVector(InputStream in, int column, int numCases) { + public static Long[] subsetLongVector(InputStream in, int column, int numCases, boolean skipHeader) { Long[] retVector = new Long[numCases]; try (Scanner scanner = new Scanner(in)) { scanner.useDelimiter("\\n"); + if (skipHeader) { + skipFirstLine(scanner); + } + for (int caseIndex = 0; caseIndex < numCases; caseIndex++) { if (scanner.hasNext()) { String[] line = (scanner.next()).split("\t", -1); @@ -549,11 +282,15 @@ public static Long[] subsetLongVector(InputStream in, int column, int numCases) * Same deal as with the method above - straightforward, but (potentially) slow. * Not a resource hog though - will only try to store one vector in memory. */ - public static String[] subsetStringVector(InputStream in, int column, int numCases) { + public static String[] subsetStringVector(InputStream in, int column, int numCases, boolean skipHeader) { String[] retVector = new String[numCases]; try (Scanner scanner = new Scanner(in)) { scanner.useDelimiter("\\n"); + if (skipHeader) { + skipFirstLine(scanner); + } + for (int caseIndex = 0; caseIndex < numCases; caseIndex++) { if (scanner.hasNext()) { String[] line = (scanner.next()).split("\t", -1); @@ -621,819 +358,10 @@ public static String[] subsetStringVector(InputStream in, int column, int numCas } - /* - * Straightforward method for subsetting a tab-delimited data file, extracting - * all the columns representing continuous variables and returning them as - * a 2-dimensional array of Doubles; - * Inefficient on large files, OK to use on small ones. - */ - public static Double[][] subsetDoubleVectors(InputStream in, Set columns, int numCases) throws IOException { - Double[][] retVector = new Double[columns.size()][numCases]; - try (Scanner scanner = new Scanner(in)) { - scanner.useDelimiter("\\n"); - - for (int caseIndex = 0; caseIndex < numCases; caseIndex++) { - if (scanner.hasNext()) { - String[] line = (scanner.next()).split("\t", -1); - int j = 0; - for (Integer i : columns) { - try { - // TODO: verify that NaN and +-Inf are going to be - // handled correctly here! -- L.A. - // NO, "+-Inf" is not handled correctly; see the - // comment further down below. - retVector[j][caseIndex] = new Double(line[i]); - } catch (NumberFormatException ex) { - retVector[j][caseIndex] = null; // missing value - } - j++; - } - } else { - throw new IOException("Tab file has fewer rows than the stored number of cases!"); - } - } - - int tailIndex = numCases; - while (scanner.hasNext()) { - String nextLine = scanner.next(); - if (!"".equals(nextLine)) { - throw new IOException("Tab file has more nonempty rows than the stored number of cases ("+numCases+")! current index: "+tailIndex+", line: "+nextLine); - } - tailIndex++; - } - - } - return retVector; - - } - - public String[] subsetStringVector(DataFile datafile, int column) throws IOException { - return (String[])subsetObjectVector(datafile, column, COLUMN_TYPE_STRING); - } - - public Double[] subsetDoubleVector(DataFile datafile, int column) throws IOException { - return (Double[])subsetObjectVector(datafile, column, COLUMN_TYPE_DOUBLE); - } - - public Long[] subsetLongVector(DataFile datafile, int column) throws IOException { - return (Long[])subsetObjectVector(datafile, column, COLUMN_TYPE_LONG); - } - - // Float methods are temporary; - // In normal operations we'll be treating all the floating point types as - // doubles. I need to be able to handle floats for some 4.0 vs 3.* ingest - // tests. -- L.A. 
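For code that consumes these helpers, the only change is the extra flag saying whether the stored file's first line is the variable-name header. An illustrative use of one of the static methods above; the path and dimensions are made up:

    import java.io.FileInputStream;
    import java.io.InputStream;
    import edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator;

    public class ColumnSubsetSketch {
        public static void main(String[] args) throws Exception {
            int column = 2;                   // position of the variable within the tab-delimited line
            int numCases = 1000;              // number of data rows, not counting the header line
            boolean storedWithHeader = true;  // true for files saved with the variable-name header

            try (InputStream in = new FileInputStream("/tmp/example.tab")) { // made-up path
                Double[] values = TabularSubsetGenerator.subsetDoubleVector(in, column, numCases, storedWithHeader);
                System.out.println("first value: " + values[0]);
            }
        }
    }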
- - public Float[] subsetFloatVector(DataFile datafile, int column) throws IOException { - return (Float[])subsetObjectVector(datafile, column, COLUMN_TYPE_FLOAT); - } - - public String[] subsetStringVector(File tabfile, int column, int varcount, int casecount) throws IOException { - return (String[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_STRING); - } - - public Double[] subsetDoubleVector(File tabfile, int column, int varcount, int casecount) throws IOException { - return (Double[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_DOUBLE); - } - - public Long[] subsetLongVector(File tabfile, int column, int varcount, int casecount) throws IOException { - return (Long[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_LONG); - } - - public Float[] subsetFloatVector(File tabfile, int column, int varcount, int casecount) throws IOException { - return (Float[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_FLOAT); - } - - public Object[] subsetObjectVector(DataFile dataFile, int column, int columntype) throws IOException { - if (!dataFile.isTabularData()) { - throw new IOException("DataFile is not tabular data."); - } - - int varcount = dataFile.getDataTable().getVarQuantity().intValue(); - int casecount = dataFile.getDataTable().getCaseQuantity().intValue(); - - if (column >= varcount) { - throw new IOException("Column "+column+" is out of bounds."); - } - - StorageIO dataAccess = dataFile.getStorageIO(); - if (!dataAccess.isLocalFile()) { - throw new IOException("Subsetting is supported on local files only!"); - } - - //File tabfile = datafile.getFileSystemLocation().toFile(); - File tabfile = dataAccess.getFileSystemPath().toFile(); - - if (columntype == COLUMN_TYPE_STRING) { - String filename = dataFile.getFileMetadata().getLabel(); - if (filename != null) { - filename = filename.replaceFirst("^_", ""); - Integer fnumvalue = null; - try { - fnumvalue = new Integer(filename); - } catch (Exception ex){ - fnumvalue = null; - } - if (fnumvalue != null) { - //if ((fnumvalue.intValue() < 112497)) { // && (fnumvalue.intValue() > 60015)) { - if ((fnumvalue.intValue() < 111931)) { // && (fnumvalue.intValue() > 60015)) { - if (!(fnumvalue.intValue() == 60007 - || fnumvalue.intValue() == 59997 - || fnumvalue.intValue() == 60015 - || fnumvalue.intValue() == 59948 - || fnumvalue.intValue() == 60012 - || fnumvalue.intValue() == 52585 - || fnumvalue.intValue() == 60005 - || fnumvalue.intValue() == 60002 - || fnumvalue.intValue() == 59954 - || fnumvalue.intValue() == 60008 - || fnumvalue.intValue() == 54972 - || fnumvalue.intValue() == 55010 - || fnumvalue.intValue() == 54996 - || fnumvalue.intValue() == 53527 - || fnumvalue.intValue() == 53546 - || fnumvalue.intValue() == 55002 - || fnumvalue.intValue() == 55006 - || fnumvalue.intValue() == 54998 - || fnumvalue.intValue() == 52552 - // SPSS/SAV cases with similar issue - compat mode must be disabled - //|| fnumvalue.intValue() == 101826 // temporary - tricky file with accents and v. 16... - || fnumvalue.intValue() == 54618 // another SAV file, with long strings... - || fnumvalue.intValue() == 54619 // [same] - || fnumvalue.intValue() == 57983 - || fnumvalue.intValue() == 58262 - || fnumvalue.intValue() == 58288 - || fnumvalue.intValue() == 58656 - || fnumvalue.intValue() == 59144 - // || fnumvalue.intValue() == 69626 [nope!] 
- )) { - dbgLog.info("\"Old\" file name detected; using \"compatibility mode\" for a character vector subset;"); - return subsetObjectVector(tabfile, column, varcount, casecount, columntype, true); - } - } - } - } + private static void skipFirstLine(Scanner scanner) { + if (!scanner.hasNext()) { + throw new RuntimeException("Failed to read the variable name header line from the tab-delimited file!"); } - - return subsetObjectVector(tabfile, column, varcount, casecount, columntype); - } - - public Object[] subsetObjectVector(File tabfile, int column, int varcount, int casecount, int columntype) throws IOException { - return subsetObjectVector(tabfile, column, varcount, casecount, columntype, false); - } - - - - public Object[] subsetObjectVector(File tabfile, int column, int varcount, int casecount, int columntype, boolean compatmode) throws IOException { - - Object[] retVector = null; - - boolean isString = false; - boolean isDouble = false; - boolean isLong = false; - boolean isFloat = false; - - //Locale loc = new Locale("en", "US"); - - if (columntype == COLUMN_TYPE_STRING) { - isString = true; - retVector = new String[casecount]; - } else if (columntype == COLUMN_TYPE_DOUBLE) { - isDouble = true; - retVector = new Double[casecount]; - } else if (columntype == COLUMN_TYPE_LONG) { - isLong = true; - retVector = new Long[casecount]; - } else if (columntype == COLUMN_TYPE_FLOAT){ - isFloat = true; - retVector = new Float[casecount]; - } else { - throw new IOException("Unsupported column type: "+columntype); - } - - File rotatedImageFile = getRotatedImage(tabfile, varcount, casecount); - long[] columnEndOffsets = extractColumnOffsets(rotatedImageFile, varcount, casecount); - long columnOffset = 0; - long columnLength = 0; - - if (column > 0) { - columnOffset = columnEndOffsets[column - 1]; - columnLength = columnEndOffsets[column] - columnEndOffsets[column - 1]; - } else { - columnOffset = varcount * 8; - columnLength = columnEndOffsets[0] - varcount * 8; - } - int caseindex = 0; - - try (FileChannel fc = (FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), - StandardOpenOption.READ))) { - fc.position(columnOffset); - int MAX_COLUMN_BUFFER = 8192; - - ByteBuffer in = ByteBuffer.allocate(MAX_COLUMN_BUFFER); - - if (columnLength < MAX_COLUMN_BUFFER) { - in.limit((int) (columnLength)); - } - - long bytesRead = 0; - long bytesReadTotal = 0; - - int byteoffset = 0; - byte[] leftover = null; - - while (bytesReadTotal < columnLength) { - bytesRead = fc.read(in); - byte[] columnBytes = in.array(); - int bytecount = 0; - - while (bytecount < bytesRead) { - if (columnBytes[bytecount] == '\n') { - /* - String token = new String(columnBytes, byteoffset, bytecount-byteoffset, "UTF8"); - - if (leftover != null) { - String leftoverString = new String (leftover, "UTF8"); - token = leftoverString + token; - leftover = null; - } - */ - /* - * Note that the way I was doing it at first - above - - * was not quite the correct way - because I was creating UTF8 - * strings from the leftover bytes, and the bytes in the - * current buffer *separately*; which means, if a multi-byte - * UTF8 character got split in the middle between one buffer - * and the next, both chunks of it would become junk - * characters, on each side! - * The correct way of doing it, of course, is to create a - * merged byte buffer, and then turn it into a UTF8 string. - * -- L.A. 
4.0 - */ - String token = null; - - if (leftover == null) { - token = new String(columnBytes, byteoffset, bytecount - byteoffset, "UTF8"); - } else { - byte[] merged = new byte[leftover.length + bytecount - byteoffset]; - - System.arraycopy(leftover, 0, merged, 0, leftover.length); - System.arraycopy(columnBytes, byteoffset, merged, leftover.length, bytecount - byteoffset); - token = new String(merged, "UTF8"); - leftover = null; - merged = null; - } - - if (isString) { - if ("".equals(token)) { - // An empty string is a string missing value! - // An empty string in quotes is an empty string! - retVector[caseindex] = null; - } else { - // Strip the outer quotes: - token = token.replaceFirst("^\\\"", ""); - token = token.replaceFirst("\\\"$", ""); - - // We need to restore the special characters that - // are stored in tab files escaped - quotes, new lines - // and tabs. Before we do that however, we need to - // take care of any escaped backslashes stored in - // the tab file. I.e., "foo\t" should be transformed - // to "foo"; but "foo\\t" should be transformed - // to "foo\t". This way new lines and tabs that were - // already escaped in the original data are not - // going to be transformed to unescaped tab and - // new line characters! - - String[] splitTokens = token.split(Matcher.quoteReplacement("\\\\"), -2); - - // (note that it's important to use the 2-argument version - // of String.split(), and set the limit argument to a - // negative value; otherwise any trailing backslashes - // are lost.) - - for (int i = 0; i < splitTokens.length; i++) { - splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\\""), "\""); - splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\t"), "\t"); - splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\n"), "\n"); - splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\r"), "\r"); - } - // TODO: - // Make (some of?) the above optional; for ex., we - // do need to restore the newlines when calculating UNFs; - // But if we are subsetting these vectors in order to - // create a new tab-delimited file, they will - // actually break things! -- L.A. Jul. 28 2014 - - token = StringUtils.join(splitTokens, '\\'); - - // "compatibility mode" - a hack, to be able to produce - // unfs identical to those produced by the "early" - // unf5 jar; will be removed in production 4.0. - // -- L.A. (TODO: ...) - if (compatmode && !"".equals(token)) { - if (token.length() > 128) { - if ("".equals(token.trim())) { - // don't ask... - token = token.substring(0, 129); - } else { - token = token.substring(0, 128); - // token = String.format(loc, "%.128s", token); - token = token.trim(); - // dbgLog.info("formatted and trimmed: "+token); - } - } else { - if ("".equals(token.trim())) { - // again, don't ask; - // - this replicates some bugginness - // that happens inside unf5; - token = "null"; - } else { - token = token.trim(); - } - } - } - - retVector[caseindex] = token; - } - } else if (isDouble) { - try { - // TODO: verify that NaN and +-Inf are - // handled correctly here! -- L.A. - // Verified: new Double("nan") works correctly, - // resulting in Double.NaN; - // Double("[+-]Inf") doesn't work however; - // (the constructor appears to be expecting it - // to be spelled as "Infinity", "-Infinity", etc. 
- if ("inf".equalsIgnoreCase(token) || "+inf".equalsIgnoreCase(token)) { - retVector[caseindex] = java.lang.Double.POSITIVE_INFINITY; - } else if ("-inf".equalsIgnoreCase(token)) { - retVector[caseindex] = java.lang.Double.NEGATIVE_INFINITY; - } else if (token == null || token.equals("")) { - // missing value: - retVector[caseindex] = null; - } else { - retVector[caseindex] = new Double(token); - } - } catch (NumberFormatException ex) { - dbgLog.warning("NumberFormatException thrown for " + token + " as Double"); - - retVector[caseindex] = null; // missing value - // TODO: ? - } - } else if (isLong) { - try { - retVector[caseindex] = new Long(token); - } catch (NumberFormatException ex) { - retVector[caseindex] = null; // assume missing value - } - } else if (isFloat) { - try { - if ("inf".equalsIgnoreCase(token) || "+inf".equalsIgnoreCase(token)) { - retVector[caseindex] = java.lang.Float.POSITIVE_INFINITY; - } else if ("-inf".equalsIgnoreCase(token)) { - retVector[caseindex] = java.lang.Float.NEGATIVE_INFINITY; - } else if (token == null || token.equals("")) { - // missing value: - retVector[caseindex] = null; - } else { - retVector[caseindex] = new Float(token); - } - } catch (NumberFormatException ex) { - dbgLog.warning("NumberFormatException thrown for " + token + " as Float"); - retVector[caseindex] = null; // assume missing value (TODO: ?) - } - } - caseindex++; - - if (bytecount == bytesRead - 1) { - byteoffset = 0; - } else { - byteoffset = bytecount + 1; - } - } else { - if (bytecount == bytesRead - 1) { - // We've reached the end of the buffer; - // This means we'll save whatever unused bytes left in - // it - i.e., the bytes between the last new line - // encountered and the end - in the leftover buffer. - - // *EXCEPT*, there may be a case of a very long String - // that is actually longer than MAX_COLUMN_BUFFER, in - // which case it is possible that we've read through - // an entire buffer of bytes without finding any - // new lines... in this case we may need to add this - // entire byte buffer to an already existing leftover - // buffer! 
- if (leftover == null) { - leftover = new byte[(int) bytesRead - byteoffset]; - System.arraycopy(columnBytes, byteoffset, leftover, 0, (int) bytesRead - byteoffset); - } else { - if (byteoffset != 0) { - throw new IOException("Reached the end of the byte buffer, with some leftover left from the last read; yet the offset is not zero!"); - } - byte[] merged = new byte[leftover.length + (int) bytesRead]; - - System.arraycopy(leftover, 0, merged, 0, leftover.length); - System.arraycopy(columnBytes, byteoffset, merged, leftover.length, (int) bytesRead); - // leftover = null; - leftover = merged; - merged = null; - } - byteoffset = 0; - - } - } - bytecount++; - } - - bytesReadTotal += bytesRead; - in.clear(); - if (columnLength - bytesReadTotal < MAX_COLUMN_BUFFER) { - in.limit((int) (columnLength - bytesReadTotal)); - } - } - - } - - if (caseindex != casecount) { - throw new IOException("Faile to read "+casecount+" tokens for column "+column); - //System.out.println("read "+caseindex+" tokens instead of expected "+casecount+"."); - } - - return retVector; - } - - private long[] extractColumnOffsets (File rotatedImageFile, int varcount, int casecount) throws IOException { - long[] byteOffsets = new long[varcount]; - - try (BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotatedImageFile))) { - - byte[] offsetHeader = new byte[varcount * 8]; - - int readlen = rotfileStream.read(offsetHeader); - - if (readlen != varcount * 8) { - throw new IOException("Could not read " + varcount * 8 + " header bytes from the rotated file."); - } - - for (int varindex = 0; varindex < varcount; varindex++) { - byte[] offsetBytes = new byte[8]; - System.arraycopy(offsetHeader, varindex * 8, offsetBytes, 0, 8); - - ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetBytes); - byteOffsets[varindex] = offsetByteBuffer.getLong(); - - // System.out.println(byteOffsets[varindex]); - } - - } - - return byteOffsets; - } - - private File getRotatedImage(File tabfile, int varcount, int casecount) throws IOException { - String fileName = tabfile.getAbsolutePath(); - String rotatedImageFileName = fileName + ".90d"; - File rotatedImageFile = new File(rotatedImageFileName); - if (rotatedImageFile.exists()) { - //System.out.println("Image already exists!"); - return rotatedImageFile; - } - - return generateRotatedImage(tabfile, varcount, casecount); - - } - - private File generateRotatedImage (File tabfile, int varcount, int casecount) throws IOException { - // TODO: throw exceptions if bad file, zero varcount, etc. ... - - String fileName = tabfile.getAbsolutePath(); - String rotatedImageFileName = fileName + ".90d"; - - int MAX_OUTPUT_STREAMS = 32; - int MAX_BUFFERED_BYTES = 10 * 1024 * 1024; // 10 MB - for now? - int MAX_COLUMN_BUFFER = 8 * 1024; - - // offsetHeader will contain the byte offsets of the individual column - // vectors in the final rotated image file - byte[] offsetHeader = new byte[varcount * 8]; - int[] bufferedSizes = new int[varcount]; - long[] cachedfileSizes = new long[varcount]; - File[] columnTempFiles = new File[varcount]; - - for (int i = 0; i < varcount; i++) { - bufferedSizes[i] = 0; - cachedfileSizes[i] = 0; - } - - // TODO: adjust MAX_COLUMN_BUFFER here, so that the total size is - // no more than MAX_BUFFERED_BYTES (but no less than 1024 maybe?) 
- - byte[][] bufferedColumns = new byte [varcount][MAX_COLUMN_BUFFER]; - - // read the tab-delimited file: - - try (FileInputStream tabfileStream = new FileInputStream(tabfile); - Scanner scanner = new Scanner(tabfileStream)) { - scanner.useDelimiter("\\n"); - - for (int caseindex = 0; caseindex < casecount; caseindex++) { - if (scanner.hasNext()) { - String[] line = (scanner.next()).split("\t", -1); - // TODO: throw an exception if there are fewer tab-delimited - // tokens than the number of variables specified. - String token = ""; - int tokensize = 0; - for (int varindex = 0; varindex < varcount; varindex++) { - // TODO: figure out the safest way to convert strings to - // bytes here. Is it going to be safer to use getBytes("UTF8")? - // we are already making the assumption that the values - // in the tab file are in UTF8. -- L.A. - token = line[varindex] + "\n"; - tokensize = token.getBytes().length; - if (bufferedSizes[varindex] + tokensize > MAX_COLUMN_BUFFER) { - // fill the buffer and dump its contents into the temp file: - // (do note that there may be *several* MAX_COLUMN_BUFFERs - // worth of bytes in the token!) - - int tokenoffset = 0; - - if (bufferedSizes[varindex] != MAX_COLUMN_BUFFER) { - tokenoffset = MAX_COLUMN_BUFFER - bufferedSizes[varindex]; - System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], tokenoffset); - } // (otherwise the buffer is already full, and we should - // simply dump it into the temp file, without adding any - // extra bytes to it) - - File bufferTempFile = columnTempFiles[varindex]; - if (bufferTempFile == null) { - bufferTempFile = File.createTempFile("columnBufferFile", "bytes"); - columnTempFiles[varindex] = bufferTempFile; - } - - // *append* the contents of the buffer to the end of the - // temp file, if already exists: - try (BufferedOutputStream outputStream = new BufferedOutputStream( - new FileOutputStream(bufferTempFile, true))) { - outputStream.write(bufferedColumns[varindex], 0, MAX_COLUMN_BUFFER); - cachedfileSizes[varindex] += MAX_COLUMN_BUFFER; - - // keep writing MAX_COLUMN_BUFFER-size chunks of bytes into - // the temp file, for as long as there's more than MAX_COLUMN_BUFFER - // bytes left in the token: - - while (tokensize - tokenoffset > MAX_COLUMN_BUFFER) { - outputStream.write(token.getBytes(), tokenoffset, MAX_COLUMN_BUFFER); - cachedfileSizes[varindex] += MAX_COLUMN_BUFFER; - tokenoffset += MAX_COLUMN_BUFFER; - } - - } - - // buffer the remaining bytes and reset the buffered - // byte counter: - - System.arraycopy(token.getBytes(), - tokenoffset, - bufferedColumns[varindex], - 0, - tokensize - tokenoffset); - - bufferedSizes[varindex] = tokensize - tokenoffset; - - } else { - // continue buffering - System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], tokensize); - bufferedSizes[varindex] += tokensize; - } - } - } else { - throw new IOException("Tab file has fewer rows than the stored number of cases!"); - } - } - } - - // OK, we've created the individual byte vectors of the tab file columns; - // they may be partially saved in temp files and/or in memory. - // We now need to go through all these buffers and create the final - // rotated image file. - - try (BufferedOutputStream finalOut = new BufferedOutputStream( - new FileOutputStream(new File(rotatedImageFileName)))) { - - // but first we should create the offset header and write it out into - // the final file; because it should be at the head, doh! 
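For orientation, the on-disk layout of the ".90d" rotated image assembled here is: a header of varcount 8-byte big-endian longs, where entry i holds the byte offset at which column i ends (the first column starts immediately after the header, at varcount * 8), followed by the newline-separated column vectors themselves. A hedged sketch of slicing one column out of such a file, assuming that layout (this is not the class's actual API):

    import java.io.*;

    public class RotatedImageSketch {
        public static byte[] readColumnBytes(File rotatedImage, int varcount, int column) throws IOException {
            try (DataInputStream in = new DataInputStream(
                    new BufferedInputStream(new FileInputStream(rotatedImage)))) {
                long[] endOffsets = new long[varcount];
                for (int i = 0; i < varcount; i++) {
                    endOffsets[i] = in.readLong(); // big-endian, same encoding as ByteBuffer.putLong()
                }
                long headerSize = varcount * 8L;
                long start = (column == 0) ? headerSize : endOffsets[column - 1];
                int length = (int) (endOffsets[column] - start);
                in.skipNBytes(start - headerSize);  // skip the preceding column vectors
                byte[] columnBytes = new byte[length];
                in.readFully(columnBytes);          // newline-separated values of this one column
                return columnBytes;
            }
        }
    }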
- - long columnOffset = varcount * 8; - // (this is the offset of the first column vector; it is equal to the - // size of the offset header, i.e. varcount * 8 bytes) - - for (int varindex = 0; varindex < varcount; varindex++) { - long totalColumnBytes = cachedfileSizes[varindex] + bufferedSizes[varindex]; - columnOffset += totalColumnBytes; - // totalColumnBytes; - byte[] columnOffsetByteArray = ByteBuffer.allocate(8).putLong(columnOffset).array(); - System.arraycopy(columnOffsetByteArray, 0, offsetHeader, varindex * 8, 8); - } - - finalOut.write(offsetHeader, 0, varcount * 8); - - for (int varindex = 0; varindex < varcount; varindex++) { - long cachedBytesRead = 0; - - // check if there is a cached temp file: - - File cachedTempFile = columnTempFiles[varindex]; - if (cachedTempFile != null) { - byte[] cachedBytes = new byte[MAX_COLUMN_BUFFER]; - try (BufferedInputStream cachedIn = new BufferedInputStream(new FileInputStream(cachedTempFile))) { - int readlen = 0; - while ((readlen = cachedIn.read(cachedBytes)) > -1) { - finalOut.write(cachedBytes, 0, readlen); - cachedBytesRead += readlen; - } - } - - // delete the temp file: - cachedTempFile.delete(); - - } - - if (cachedBytesRead != cachedfileSizes[varindex]) { - throw new IOException("Could not read the correct number of bytes cached for column "+varindex+"; "+ - cachedfileSizes[varindex] + " bytes expected, "+cachedBytesRead+" read."); - } - - // then check if there are any bytes buffered for this column: - - if (bufferedSizes[varindex] > 0) { - finalOut.write(bufferedColumns[varindex], 0, bufferedSizes[varindex]); - } - - } - } - - return new File(rotatedImageFileName); - - } - - /* - * Test method for taking a "rotated" image, and reversing it, reassembling - * all the columns in the original order. Which should result in a file - * byte-for-byte identical file to the original tab-delimited version. - * - * (do note that this method is not efficiently implemented; it's only - * being used for experiments so far, to confirm the accuracy of the - * accuracy of generateRotatedImage(). It should not be used for any - * practical means in the application!) 
- */ - private void reverseRotatedImage (File rotfile, int varcount, int casecount) throws IOException { - // open the file, read in the offset header: - try (BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotfile))) { - byte[] offsetHeader = new byte[varcount * 8]; - long[] byteOffsets = new long[varcount]; - - int readlen = rotfileStream.read(offsetHeader); - - if (readlen != varcount * 8) { - throw new IOException ("Could not read "+varcount*8+" header bytes from the rotated file."); - } - - for (int varindex = 0; varindex < varcount; varindex++) { - byte[] offsetBytes = new byte[8]; - System.arraycopy(offsetHeader, varindex*8, offsetBytes, 0, 8); - - ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetBytes); - byteOffsets[varindex] = offsetByteBuffer.getLong(); - - //System.out.println(byteOffsets[varindex]); - } - - String [][] reversedMatrix = new String[casecount][varcount]; - - long offset = varcount * 8; - byte[] columnBytes; - - for (int varindex = 0; varindex < varcount; varindex++) { - long columnLength = byteOffsets[varindex] - offset; - - - - columnBytes = new byte[(int)columnLength]; - readlen = rotfileStream.read(columnBytes); - - if (readlen != columnLength) { - throw new IOException ("Could not read "+columnBytes+" bytes for column "+varindex); - } - /* - String columnString = new String(columnBytes); - //System.out.print(columnString); - String[] values = columnString.split("\n", -1); - - if (values.length < casecount) { - throw new IOException("count mismatch: "+values.length+" tokens found for column "+varindex); - } - - for (int caseindex = 0; caseindex < casecount; caseindex++) { - reversedMatrix[caseindex][varindex] = values[caseindex]; - }*/ - - int bytecount = 0; - int byteoffset = 0; - int caseindex = 0; - //System.out.println("generating value vector for column "+varindex); - while (bytecount < columnLength) { - if (columnBytes[bytecount] == '\n') { - String token = new String(columnBytes, byteoffset, bytecount-byteoffset); - reversedMatrix[caseindex++][varindex] = token; - byteoffset = bytecount + 1; - } - bytecount++; - } - - if (caseindex != casecount) { - throw new IOException("count mismatch: "+caseindex+" tokens found for column "+varindex); - } - offset = byteOffsets[varindex]; - } - - for (int caseindex = 0; caseindex < casecount; caseindex++) { - for (int varindex = 0; varindex < varcount; varindex++) { - System.out.print(reversedMatrix[caseindex][varindex]); - if (varindex < varcount-1) { - System.out.print("\t"); - } else { - System.out.print("\n"); - } - } - } - - } - - - } - - /** - * main() method, for testing - * usage: java edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator testfile.tab varcount casecount column type - * make sure the CLASSPATH contains ... 
- * - */ - - public static void main(String[] args) { - - String tabFileName = args[0]; - int varcount = new Integer(args[1]).intValue(); - int casecount = new Integer(args[2]).intValue(); - int column = new Integer(args[3]).intValue(); - String type = args[4]; - - File tabFile = new File(tabFileName); - File rotatedImageFile = null; - - TabularSubsetGenerator subsetGenerator = new TabularSubsetGenerator(); - - /* - try { - rotatedImageFile = subsetGenerator.getRotatedImage(tabFile, varcount, casecount); - } catch (IOException ex) { - System.out.println(ex.getMessage()); - } - */ - - //System.out.println("\nFinished generating \"rotated\" column image file."); - - //System.out.println("\nOffsets:"); - - MathContext doubleMathContext = new MathContext(15, RoundingMode.HALF_EVEN); - String FORMAT_IEEE754 = "%+#.15e"; - - try { - //subsetGenerator.reverseRotatedImage(rotatedImageFile, varcount, casecount); - //String[] columns = subsetGenerator.subsetStringVector(tabFile, column, varcount, casecount); - if ("string".equals(type)) { - String[] columns = subsetGenerator.subsetStringVector(tabFile, column, varcount, casecount); - for (int i = 0; i < casecount; i++) { - System.out.println(columns[i]); - } - } else { - - Double[] columns = subsetGenerator.subsetDoubleVector(tabFile, column, varcount, casecount); - for (int i = 0; i < casecount; i++) { - if (columns[i] != null) { - BigDecimal outBigDecimal = new BigDecimal(columns[i], doubleMathContext); - System.out.println(String.format(FORMAT_IEEE754, outBigDecimal)); - } else { - System.out.println("NA"); - } - //System.out.println(columns[i]); - } - } - } catch (IOException ex) { - System.out.println(ex.getMessage()); - } - } -} - - + scanner.next(); + } +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetInputStream.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetInputStream.java deleted file mode 100644 index 89e033353c1..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetInputStream.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
- */ - -package edu.harvard.iq.dataverse.dataaccess; - -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.datavariable.DataVariable; -import java.io.IOException; -import java.io.InputStream; -import java.util.List; -import java.util.logging.Logger; - -/** - * - * @author Leonid Andreev - */ -public class TabularSubsetInputStream extends InputStream { - private static final Logger logger = Logger.getLogger(TabularSubsetInputStream.class.getCanonicalName()); - - private TabularSubsetGenerator subsetGenerator = null; - private int numberOfSubsetVariables; - private int numberOfObservations; - private int numberOfObservationsRead = 0; - private byte[] leftoverBytes = null; - - public TabularSubsetInputStream(DataFile datafile, List variables) throws IOException { - if (datafile == null) { - throw new IOException("Null datafile in subset request"); - } - if (!datafile.isTabularData()) { - throw new IOException("Subset requested on a non-tabular data file"); - } - numberOfObservations = datafile.getDataTable().getCaseQuantity().intValue(); - - if (variables == null || variables.size() < 1) { - throw new IOException("Null or empty list of variables in subset request."); - } - numberOfSubsetVariables = variables.size(); - subsetGenerator = new TabularSubsetGenerator(datafile, variables); - - } - - //@Override - public int read() throws IOException { - throw new IOException("read() method not implemented; do not use."); - } - - //@Override - public int read(byte[] b) throws IOException { - // TODO: - // Move this code into TabularSubsetGenerator - logger.fine("subset input stream: read request, on a "+b.length+" byte buffer;"); - - if (numberOfSubsetVariables == 1) { - logger.fine("calling the single variable subset read method"); - return subsetGenerator.readSingleColumnSubset(b); - } - - int bytesread = 0; - byte [] linebuffer; - - // do we have a leftover? - if (leftoverBytes != null) { - if (leftoverBytes.length < b.length) { - System.arraycopy(leftoverBytes, 0, b, 0, leftoverBytes.length); - bytesread = leftoverBytes.length; - leftoverBytes = null; - - } else { - // shouldn't really happen... unless it's a very large subset, - // or a very long string, etc. - System.arraycopy(leftoverBytes, 0, b, 0, b.length); - byte[] tmp = new byte[leftoverBytes.length - b.length]; - System.arraycopy(leftoverBytes, b.length, tmp, 0, leftoverBytes.length - b.length); - leftoverBytes = tmp; - tmp = null; - return b.length; - } - } - - while (bytesread < b.length && numberOfObservationsRead < numberOfObservations) { - linebuffer = subsetGenerator.readSubsetLineBytes(); - numberOfObservationsRead++; - - if (bytesread + linebuffer.length < b.length) { - // copy linebuffer into the return buffer: - System.arraycopy(linebuffer, 0, b, bytesread, linebuffer.length); - bytesread += linebuffer.length; - } else { - System.arraycopy(linebuffer, 0, b, bytesread, b.length - bytesread); - // save the leftover; - if (bytesread + linebuffer.length > b.length) { - leftoverBytes = new byte[bytesread + linebuffer.length - b.length]; - System.arraycopy(linebuffer, b.length - bytesread, leftoverBytes, 0, bytesread + linebuffer.length - b.length); - } - return b.length; - } - } - - // and this means we've reached the end of the tab file! - - return bytesread > 0 ? 
bytesread : -1; - } - - //@Override - public void close() { - if (subsetGenerator != null) { - subsetGenerator.close(); - } - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index 5119b4b96c7..edd01ae98a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -545,6 +545,16 @@ private void createDataFileDDI(XMLStreamWriter xmlw, Set excludedFieldSe List vars = variableService.findByDataTableId(dt.getId()); if (checkField("catgry", excludedFieldSet, includedFieldSet)) { if (checkIsWithoutFrequencies(vars)) { + // @todo: the method called here to calculate frequencies + // when they are missing from the database (for whatever + // reasons) subsets the physical tab-delimited file and + // calculates them in real time. this is very expensive operation + // potentially. let's make sure that, when we do this, we + // save the resulting frequencies in the database, so that + // we don't have to do this again. Also, let's double check + // whether the "checkIsWithoutFrequencies()" method is doing + // the right thing - as it appears to return true when there + // are no categorical variables in the DataTable (?) calculateFrequencies(df, vars); } } @@ -580,6 +590,7 @@ private boolean checkIsWithoutFrequencies(List vars) { private void calculateFrequencies(DataFile df, List vars) { + // @todo: see the comment in the part of the code that calls this method try { DataConverter dc = new DataConverter(); File tabFile = dc.downloadFromStorageIO(df.getStorageIO()); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 233f746fb17..9bacafd173f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -726,27 +726,17 @@ public void produceSummaryStatistics(DataFile dataFile, File generatedTabularFil } public void produceContinuousSummaryStatistics(DataFile dataFile, File generatedTabularFile) throws IOException { - - /* - // quick, but memory-inefficient way: - // - this method just loads the entire file-worth of continuous vectors - // into a Double[][] matrix. 
- //Double[][] variableVectors = subsetContinuousVectors(dataFile); - //calculateContinuousSummaryStatistics(dataFile, variableVectors); - - // A more sophisticated way: this subsets one column at a time, using - // the new optimized subsetting that does not have to read any extra - // bytes from the file to extract the column: - - TabularSubsetGenerator subsetGenerator = new TabularSubsetGenerator(); - */ for (int i = 0; i < dataFile.getDataTable().getVarQuantity(); i++) { if (dataFile.getDataTable().getDataVariables().get(i).isIntervalContinuous()) { logger.fine("subsetting continuous vector"); if ("float".equals(dataFile.getDataTable().getDataVariables().get(i).getFormat())) { - Float[] variableVector = TabularSubsetGenerator.subsetFloatVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); + Float[] variableVector = TabularSubsetGenerator.subsetFloatVector( + new FileInputStream(generatedTabularFile), + i, + dataFile.getDataTable().getCaseQuantity().intValue(), + dataFile.getDataTable().isStoredWithVariableHeader()); logger.fine("Calculating summary statistics on a Float vector;"); calculateContinuousSummaryStatistics(dataFile, i, variableVector); // calculate the UNF while we are at it: @@ -754,7 +744,11 @@ public void produceContinuousSummaryStatistics(DataFile dataFile, File generated calculateUNF(dataFile, i, variableVector); variableVector = null; } else { - Double[] variableVector = TabularSubsetGenerator.subsetDoubleVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); + Double[] variableVector = TabularSubsetGenerator.subsetDoubleVector( + new FileInputStream(generatedTabularFile), + i, + dataFile.getDataTable().getCaseQuantity().intValue(), + dataFile.getDataTable().isStoredWithVariableHeader()); logger.fine("Calculating summary statistics on a Double vector;"); calculateContinuousSummaryStatistics(dataFile, i, variableVector); // calculate the UNF while we are at it: @@ -776,7 +770,11 @@ public void produceDiscreteNumericSummaryStatistics(DataFile dataFile, File gene && dataFile.getDataTable().getDataVariables().get(i).isTypeNumeric()) { logger.fine("subsetting discrete-numeric vector"); - Long[] variableVector = TabularSubsetGenerator.subsetLongVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); + Long[] variableVector = TabularSubsetGenerator.subsetLongVector( + new FileInputStream(generatedTabularFile), + i, + dataFile.getDataTable().getCaseQuantity().intValue(), + dataFile.getDataTable().isStoredWithVariableHeader()); // We are discussing calculating the same summary stats for // all numerics (the same kind of sumstats that we've been calculating // for numeric continuous type) -- L.A. Jul. 
2014 @@ -810,7 +808,11 @@ public void produceCharacterSummaryStatistics(DataFile dataFile, File generatedT if (dataFile.getDataTable().getDataVariables().get(i).isTypeCharacter()) { logger.fine("subsetting character vector"); - String[] variableVector = TabularSubsetGenerator.subsetStringVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); + String[] variableVector = TabularSubsetGenerator.subsetStringVector( + new FileInputStream(generatedTabularFile), + i, + dataFile.getDataTable().getCaseQuantity().intValue(), + dataFile.getDataTable().isStoredWithVariableHeader()); //calculateCharacterSummaryStatistics(dataFile, i, variableVector); // calculate the UNF while we are at it: logger.fine("Calculating UNF on a String vector"); @@ -828,20 +830,29 @@ public static void produceFrequencyStatistics(DataFile dataFile, File generatedT produceFrequencies(generatedTabularFile, vars); } - public static void produceFrequencies( File generatedTabularFile, List vars) throws IOException { + public static void produceFrequencies(File generatedTabularFile, List vars) throws IOException { for (int i = 0; i < vars.size(); i++) { Collection cats = vars.get(i).getCategories(); int caseQuantity = vars.get(i).getDataTable().getCaseQuantity().intValue(); boolean isNumeric = vars.get(i).isTypeNumeric(); + boolean skipVariableHeaderLine = vars.get(i).getDataTable().isStoredWithVariableHeader(); Object[] variableVector = null; if (cats.size() > 0) { if (isNumeric) { - variableVector = TabularSubsetGenerator.subsetFloatVector(new FileInputStream(generatedTabularFile), i, caseQuantity); + variableVector = TabularSubsetGenerator.subsetFloatVector( + new FileInputStream(generatedTabularFile), + i, + caseQuantity, + skipVariableHeaderLine); } else { - variableVector = TabularSubsetGenerator.subsetStringVector(new FileInputStream(generatedTabularFile), i, caseQuantity); + variableVector = TabularSubsetGenerator.subsetStringVector( + new FileInputStream(generatedTabularFile), + i, + caseQuantity, + skipVariableHeaderLine); } if (variableVector != null) { Hashtable freq = calculateFrequency(variableVector); @@ -923,6 +934,7 @@ public boolean ingestAsTabular(Long datafile_id) { DataFile dataFile = fileService.find(datafile_id); boolean ingestSuccessful = false; boolean forceTypeCheck = false; + boolean storingWithVariableHeader = systemConfig.isStoringIngestedFilesWithHeaders(); // Never attempt to ingest a file that's already ingested! 
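Each of these static subsetting helpers now takes a fourth boolean argument indicating whether the stored tab file begins with the variable-name header line; callers are expected to derive it from the DataTable. A hedged sketch of the calling pattern (the wrapper method and its name are illustrative only):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    import edu.harvard.iq.dataverse.DataFile;
    import edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator;

    public class SubsetCallSketch {
        // Extract column 0 as doubles, skipping the header line if (and only if)
        // the tab file was physically stored with one.
        public static Double[] firstDoubleColumn(DataFile dataFile, File generatedTabularFile) throws IOException {
            boolean skipHeaderLine = dataFile.getDataTable().isStoredWithVariableHeader();
            int caseCount = dataFile.getDataTable().getCaseQuantity().intValue();
            try (FileInputStream in = new FileInputStream(generatedTabularFile)) {
                return TabularSubsetGenerator.subsetDoubleVector(in, 0, caseCount, skipHeaderLine);
            }
        }
    }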
if (dataFile.isTabularData()) { @@ -1024,11 +1036,7 @@ public boolean ingestAsTabular(Long datafile_id) { TabularDataIngest tabDataIngest = null; try { - if (additionalData != null) { - tabDataIngest = ingestPlugin.read(inputStream, additionalData); - } else { - tabDataIngest = ingestPlugin.read(inputStream, null); - } + tabDataIngest = ingestPlugin.read(inputStream, storingWithVariableHeader, additionalData); } catch (IOException ingestEx) { dataFile.SetIngestProblem(); FileUtil.createIngestFailureReport(dataFile, ingestEx.getMessage()); @@ -1081,6 +1089,7 @@ public boolean ingestAsTabular(Long datafile_id) { dataFile.setDataTable(tabDataIngest.getDataTable()); tabDataIngest.getDataTable().setDataFile(dataFile); tabDataIngest.getDataTable().setOriginalFileName(originalFileName); + dataFile.getDataTable().setStoredWithVariableHeader(storingWithVariableHeader); try { produceSummaryStatistics(dataFile, tabFile); @@ -1172,6 +1181,7 @@ public boolean ingestAsTabular(Long datafile_id) { // Replace contents of the file with the tab-delimited data produced: dataAccess.savePath(Paths.get(tabFile.getAbsolutePath())); + // Reset the file size: dataFile.setFilesize(dataAccess.getSize()); @@ -2297,7 +2307,7 @@ public static void main(String[] args) { TabularDataIngest tabDataIngest = null; try { - tabDataIngest = ingestPlugin.read(fileInputStream, null); + tabDataIngest = ingestPlugin.read(fileInputStream, false, null); } catch (IOException ingestEx) { System.err.println("Caught an exception trying to ingest file "+file+"."); System.exit(1); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/TabularDataFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/TabularDataFileReader.java index 223b171dfb5..0f23a3d9781 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/TabularDataFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/TabularDataFileReader.java @@ -20,10 +20,13 @@ package edu.harvard.iq.dataverse.ingest.tabulardata; +import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.ingest.tabulardata.spi.*; //import edu.harvard.iq.dataverse.ingest.plugin.metadata.*; import java.io.*; import static java.lang.System.*; +import java.util.Iterator; +import java.util.List; import java.util.regex.Matcher; /** @@ -98,7 +101,7 @@ public void setDataLanguageEncoding(String dataLanguageEncoding) { * * @throws java.io.IOException if a reading error occurs. 
*/ - public abstract TabularDataIngest read(BufferedInputStream stream, File dataFile) + public abstract TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File dataFile) throws IOException; @@ -176,5 +179,26 @@ protected String escapeCharacterString(String rawString) { return escapedString; } + + protected String generateVariableHeader(List dvs) { + String varHeader = null; + + if (dvs != null) { + Iterator iter = dvs.iterator(); + DataVariable dv; + + if (iter.hasNext()) { + dv = iter.next(); + varHeader = dv.getName(); + } + + while (iter.hasNext()) { + dv = iter.next(); + varHeader = varHeader + "\t" + dv.getName(); + } + } + + return varHeader; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReader.java index 57f76df3802..f8816ababb4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReader.java @@ -110,7 +110,7 @@ private void init() throws IOException { * @throws java.io.IOException if a reading error occurs. */ @Override - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException { + public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariableHeader, File dataFile) throws IOException { init(); if (stream == null) { @@ -124,7 +124,7 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws File tabFileDestination = File.createTempFile("data-", ".tab"); PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath()); - int lineCount = readFile(localBufferedReader, dataTable, tabFileWriter); + int lineCount = readFile(localBufferedReader, dataTable, saveWithVariableHeader, tabFileWriter); logger.fine("Tab file produced: " + tabFileDestination.getAbsolutePath()); @@ -136,14 +136,17 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws } - public int readFile(BufferedReader csvReader, DataTable dataTable, PrintWriter finalOut) throws IOException { + public int readFile(BufferedReader csvReader, DataTable dataTable, boolean saveWithVariableHeader, PrintWriter finalOut) throws IOException { List variableList = new ArrayList<>(); CSVParser parser = new CSVParser(csvReader, inFormat.withHeader()); Map headers = parser.getHeaderMap(); int i = 0; + String variableNameHeader = null; + for (String varName : headers.keySet()) { + // @todo: is .keySet() guaranteed to return the names in the right order? if (varName == null || varName.isEmpty()) { // TODO: // Add a sensible variable name validation algorithm. @@ -158,6 +161,13 @@ public int readFile(BufferedReader csvReader, DataTable dataTable, PrintWriter f dv.setTypeCharacter(); dv.setIntervalDiscrete(); + + if (saveWithVariableHeader) { + variableNameHeader = variableNameHeader == null + ? 
varName + : variableNameHeader.concat("\t" + varName); + } + i++; } @@ -342,6 +352,14 @@ public int readFile(BufferedReader csvReader, DataTable dataTable, PrintWriter f try (BufferedReader secondPassReader = new BufferedReader(new FileReader(firstPassTempFile))) { parser = new CSVParser(secondPassReader, inFormat.withHeader()); String[] caseRow = new String[headers.size()]; + + // Save the variable name header, if requested + if (saveWithVariableHeader) { + if (variableNameHeader == null) { + throw new IOException("failed to generate the Variable Names header"); + } + finalOut.println(variableNameHeader); + } for (CSVRecord record : parser) { if (!record.isConsistent()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java index 2dec701592e..73818f8fb62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java @@ -505,7 +505,7 @@ private void init() throws IOException { } @Override - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException { + public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File dataFile) throws IOException { dbgLog.info("***** DTAFileReader: read() start *****"); if (dataFile != null) { @@ -519,7 +519,7 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws if (releaseNumber!=104) { decodeExpansionFields(stream); } - decodeData(stream); + decodeData(stream, storeWithVariableHeader); decodeValueLabels(stream); ingesteddata.setDataTable(dataTable); @@ -1665,7 +1665,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx dbgLog.fine("parseValueLabelsRelease108(): end"); } - private void decodeData(BufferedInputStream stream) throws IOException { + private void decodeData(BufferedInputStream stream, boolean saveWithVariableHeader) throws IOException { dbgLog.fine("\n***** decodeData(): start *****"); @@ -1719,6 +1719,11 @@ private void decodeData(BufferedInputStream stream) throws IOException { BUT, this needs to be reviewed/confirmed etc! */ //String[][] dateFormat = new String[nvar][nobs]; + + // add the variable header here, if needed + if (saveWithVariableHeader) { + pwout.println(generateVariableHeader(dataTable.getDataVariables())); + } for (int i = 0; i < nobs; i++) { byte[] dataRowBytes = new byte[bytes_per_row]; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java index 22581834676..53607d541de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java @@ -339,7 +339,7 @@ private void init() throws IOException { } @Override - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException { + public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File dataFile) throws IOException { logger.fine("NewDTAFileReader: read() start"); // shit ton of diagnostics (still) needed here!! -- L.A. 
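The header line being added throughout these readers is simply the variable names joined by tabs: generateVariableHeader() builds it from the DataVariable list, and the CSV reader accumulates the same thing here while walking the parsed header. A tiny sketch of what the first line of a tab file stored "with variable header" looks like (the variable names are hypothetical):

    import java.util.List;

    public class VariableHeaderSketch {
        public static void main(String[] args) {
            // Hypothetical names; in production they come from DataVariable.getName().
            List<String> names = List.of("make", "price", "mpg");
            String headerLine = String.join("\t", names);
            System.out.println(headerLine); // make<TAB>price<TAB>mpg
            // The stored file then begins:
            //   make<TAB>price<TAB>mpg
            //   "AMC Concord"<TAB>4099<TAB>22
            //   ...
        }
    }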
@@ -363,7 +363,13 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws // "characteristics" - STATA-proprietary information // (we are skipping it) readCharacteristics(dataReader); - readData(dataReader); + + String variableHeaderLine = null; + + if (storeWithVariableHeader) { + variableHeaderLine = generateVariableHeader(dataTable.getDataVariables()); + } + readData(dataReader, variableHeaderLine); // (potentially) large, (potentially) non-ASCII character strings // saved outside the section, and referenced @@ -707,7 +713,7 @@ private void readCharacteristics(DataReader reader) throws IOException { } - private void readData(DataReader reader) throws IOException { + private void readData(DataReader reader, String variableHeaderLine) throws IOException { logger.fine("Data section; at offset " + reader.getByteOffset() + "; dta map offset: " + dtaMap.getOffset_data()); logger.fine("readData(): start"); reader.readOpeningTag(TAG_DATA); @@ -731,6 +737,11 @@ private void readData(DataReader reader) throws IOException { FileOutputStream fileOutTab = new FileOutputStream(tabDelimitedDataFile); PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + // add the variable header here, if needed + if (variableHeaderLine != null) { + pwout.println(variableHeaderLine); + } + logger.fine("Beginning to read data stream."); for (int i = 0; i < nobs; i++) { @@ -999,6 +1010,8 @@ private void readSTRLs(DataReader reader) throws IOException { int nobs = dataTable.getCaseQuantity().intValue(); String[] line; + + //@todo: adjust for the case of storing the file with the variable header for (int obsindex = 0; obsindex < nobs; obsindex++) { if (scanner.hasNext()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java index c90b0ea6950..2ee966c3e31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java @@ -180,7 +180,7 @@ private void init() throws IOException { } @Override - public TabularDataIngest read(BufferedInputStream stream, File additionalData) throws IOException{ + public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File additionalData) throws IOException{ dbgLog.fine("PORFileReader: read() start"); if (additionalData != null) { @@ -226,7 +226,7 @@ public TabularDataIngest read(BufferedInputStream stream, File additionalData) t headerId = "8S"; } - decode(headerId, bfReader); + decode(headerId, bfReader, storeWithVariableHeader); // for last iteration @@ -382,7 +382,7 @@ public TabularDataIngest read(BufferedInputStream stream, File additionalData) t return ingesteddata; } - private void decode(String headerId, BufferedReader reader) throws IOException{ + private void decode(String headerId, BufferedReader reader, boolean storeWithVariableHeader) throws IOException{ if (headerId.equals("1")) decodeProductName(reader); else if (headerId.equals("2")) decodeLicensee(reader); else if (headerId.equals("3")) decodeFileLabel(reader); @@ -398,7 +398,7 @@ private void decode(String headerId, BufferedReader reader) throws IOException{ else if (headerId.equals("C")) decodeVariableLabel(reader); else if (headerId.equals("D")) decodeValueLabel(reader); else if (headerId.equals("E")) decodeDocument(reader); - else if 
(headerId.equals("F")) decodeData(reader); + else if (headerId.equals("F")) decodeData(reader, storeWithVariableHeader); } @@ -1099,7 +1099,7 @@ private void decodeDocument(BufferedReader reader) throws IOException { } - private void decodeData(BufferedReader reader) throws IOException { + private void decodeData(BufferedReader reader, boolean storeWithVariableHeader) throws IOException { dbgLog.fine("decodeData(): start"); // TODO: get rid of this "variableTypeFinal"; -- L.A. 4.0 beta int[] variableTypeFinal= new int[varQnty]; @@ -1126,6 +1126,9 @@ private void decodeData(BufferedReader reader) throws IOException { // contents (variable) checker concering decimals Arrays.fill(variableTypeFinal, 0); + if (storeWithVariableHeader) { + pwout.println(StringUtils.join(variableNameList, "\t")); + } // raw-case counter int j = 0; // case diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java index eb1353fd792..50f2f89e354 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java @@ -473,7 +473,7 @@ private void init() throws IOException { * @throws java.io.IOException if a reading error occurs. */ @Override - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException { + public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariableHeader, File dataFile) throws IOException { init(); @@ -509,7 +509,7 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws File tabFileDestination = File.createTempFile("data-", ".tab"); PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), "UTF-8"); - int lineCount = csvFileReader.read(localBufferedReader, dataTable, tabFileWriter); + int lineCount = csvFileReader.read(localBufferedReader, dataTable, saveWithVariableHeader, tabFileWriter); LOG.fine("RDATAFileReader: successfully read "+lineCount+" lines of tab-delimited data."); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RTabFileParser.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RTabFileParser.java index f60b7733463..fbe7e401b57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RTabFileParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RTabFileParser.java @@ -61,8 +61,8 @@ public RTabFileParser (char delimiterChar) { // should be used. - public int read(BufferedReader csvReader, DataTable dataTable, PrintWriter pwout) throws IOException { - dbgLog.warning("RTabFileParser: Inside R Tab file parser"); + public int read(BufferedReader csvReader, DataTable dataTable, boolean saveWithVariableHeader, PrintWriter pwout) throws IOException { + dbgLog.fine("RTabFileParser: Inside R Tab file parser"); int varQnty = 0; @@ -94,14 +94,17 @@ public int read(BufferedReader csvReader, DataTable dataTable, PrintWriter pwout boolean[] isTimeVariable = new boolean[varQnty]; boolean[] isBooleanVariable = new boolean[varQnty]; + String variableNameHeader = null; + if (dataTable.getDataVariables() != null) { for (int i = 0; i < varQnty; i++) { DataVariable var = dataTable.getDataVariables().get(i); if (var == null) { - // throw exception! 
+ throw new IOException ("null dataVariable passed to the parser"); + } if (var.getType() == null) { - // throw exception! + throw new IOException ("null dataVariable type passed to the parser"); } if (var.isTypeCharacter()) { isCharacterVariable[i] = true; @@ -128,13 +131,24 @@ public int read(BufferedReader csvReader, DataTable dataTable, PrintWriter pwout } } } else { - // throw excepion "unknown variable format type" - ? + throw new IOException ("unknown dataVariable format passed to the parser"); } - + if (saveWithVariableHeader) { + variableNameHeader = variableNameHeader == null + ? var.getName() + : variableNameHeader.concat("\t" + var.getName()); + } } } else { - // throw exception! + throw new IOException ("null dataVariables list passed to the parser"); + } + + if (saveWithVariableHeader) { + if (variableNameHeader == null) { + throw new IOException ("failed to generate the Variable Names header"); + } + pwout.println(variableNameHeader); } while ((line = csvReader.readLine()) != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java index 682b8f1166c..5eecbdfb666 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java @@ -338,7 +338,7 @@ private void init() throws IOException { } } - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException{ + public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File dataFile) throws IOException{ dbgLog.info("SAVFileReader: read() start"); if (dataFile != null) { @@ -422,7 +422,7 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws methodCurrentlyExecuted = "decodeRecordTypeData"; dbgLog.fine("***** SAVFileReader: executing method decodeRecordTypeData"); - decodeRecordTypeData(stream); + decodeRecordTypeData(stream, storeWithVariableHeader); } catch (IllegalArgumentException e) { @@ -2308,7 +2308,7 @@ void decodeRecordType999(BufferedInputStream stream) throws IOException { - void decodeRecordTypeData(BufferedInputStream stream) throws IOException { + void decodeRecordTypeData(BufferedInputStream stream, boolean storeWithVariableHeader) throws IOException { dbgLog.fine("decodeRecordTypeData(): start"); ///String fileUnfValue = null; @@ -2320,9 +2320,9 @@ void decodeRecordTypeData(BufferedInputStream stream) throws IOException { throw new IllegalArgumentException("stream == null!"); } if (isDataSectionCompressed){ - decodeRecordTypeDataCompressed(stream); + decodeRecordTypeDataCompressed(stream, storeWithVariableHeader); } else { - decodeRecordTypeDataUnCompressed(stream); + decodeRecordTypeDataUnCompressed(stream, storeWithVariableHeader); } /* UNF calculation was here... 
*/ @@ -2362,7 +2362,7 @@ PrintWriter createOutputWriter (BufferedInputStream stream) throws IOException { } - void decodeRecordTypeDataCompressed(BufferedInputStream stream) throws IOException { + void decodeRecordTypeDataCompressed(BufferedInputStream stream, boolean storeWithVariableHeader) throws IOException { dbgLog.fine("***** decodeRecordTypeDataCompressed(): start *****"); @@ -2395,7 +2395,10 @@ void decodeRecordTypeDataCompressed(BufferedInputStream stream) throws IOExcepti dbgLog.fine("printFormatTable:\n" + printFormatTable); variableFormatTypeList = new String[varQnty]; - + // write the variable header out, if instructed to do so + if (storeWithVariableHeader) { + pwout.println(generateVariableHeader(dataTable.getDataVariables())); + } for (int i = 0; i < varQnty; i++) { variableFormatTypeList[i] = SPSSConstants.FORMAT_CATEGORY_TABLE.get( @@ -2947,7 +2950,7 @@ void decodeRecordTypeDataCompressed(BufferedInputStream stream) throws IOExcepti } - void decodeRecordTypeDataUnCompressed(BufferedInputStream stream) throws IOException { + void decodeRecordTypeDataUnCompressed(BufferedInputStream stream, boolean storeWithVariableHeader) throws IOException { dbgLog.fine("***** decodeRecordTypeDataUnCompressed(): start *****"); if (stream ==null){ @@ -3013,6 +3016,11 @@ void decodeRecordTypeDataUnCompressed(BufferedInputStream stream) throws IOExcep ///dataTable2 = new Object[varQnty][caseQnty]; // storage of date formats to pass to UNF ///dateFormats = new String[varQnty][caseQnty]; + + // write the variable header out, if instructed to do so + if (storeWithVariableHeader) { + pwout.println(generateVariableHeader(dataTable.getDataVariables())); + } try { for (int i = 0; ; i++){ // case-wise loop diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java index ea3f3868f24..ef91793690e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java @@ -36,7 +36,6 @@ import org.apache.commons.lang3.StringUtils; import org.apache.poi.xssf.eventusermodel.XSSFReader; -import org.apache.poi.xssf.usermodel.XSSFRichTextString; import org.apache.poi.xssf.model.SharedStrings; import org.apache.poi.openxml4j.opc.OPCPackage; import org.xml.sax.Attributes; @@ -81,7 +80,9 @@ private void init() throws IOException { * @throws java.io.IOException if a reading error occurs. 
*/ @Override - public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException { + public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVariableHeader, File dataFile) throws IOException { + // @todo: implement handling of "saveWithVariableHeader" option + init(); TabularDataIngest ingesteddata = new TabularDataIngest(); @@ -118,6 +119,10 @@ public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws String[] caseRow = new String[varQnty]; String[] valueTokens; + // add the variable header here, if needed + if (storeWithVariableHeader) { + finalWriter.println(generateVariableHeader(dataTable.getDataVariables())); + } while ((line = secondPassReader.readLine()) != null) { // chop the line: @@ -549,7 +554,7 @@ public static void main(String[] args) throws Exception { BufferedInputStream xlsxInputStream = new BufferedInputStream(new FileInputStream(new File(args[0]))); - TabularDataIngest dataIngest = testReader.read(xlsxInputStream, null); + TabularDataIngest dataIngest = testReader.read(xlsxInputStream, false, null); dataTable = dataIngest.getDataTable(); diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 627cef08d8b..3b7632f3d9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -598,7 +598,12 @@ Whether Harvesting (OAI) service is enabled * Allows an instance admin to disable Solr search facets on the collection * and dataset pages instantly */ - DisableSolrFacets + DisableSolrFacets, + /** + * When ingesting tabular data files, store the generated tab-delimited + * files *with* the variable names line up top. + */ + StoreIngestedTabularFilesWithVarHeaders ; @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 3c6992f8ec3..ded394833f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1173,4 +1173,12 @@ public boolean isStorageQuotasEnforced() { public Long getTestStorageQuotaLimit() { return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes); } + /** + * Should we store tab-delimited files produced during ingest *with* the + * variable name header line included? + * @return boolean - defaults to false. 
+ */ + public boolean isStoringIngestedFilesWithHeaders() { + return settingsService.isTrueForKey(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders, false); + } } diff --git a/src/main/resources/db/migration/V6.1.0.2__8524-store-tabular-files-with-varheaders.sql b/src/main/resources/db/migration/V6.1.0.2__8524-store-tabular-files-with-varheaders.sql new file mode 100644 index 00000000000..7c52a00107a --- /dev/null +++ b/src/main/resources/db/migration/V6.1.0.2__8524-store-tabular-files-with-varheaders.sql @@ -0,0 +1 @@ +ALTER TABLE datatable ADD COLUMN IF NOT EXISTS storedWithVariableHeader BOOLEAN DEFAULT FALSE; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 915f82a6de2..cfc6f9335b3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -16,6 +16,7 @@ import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; import java.io.IOException; @@ -33,6 +34,8 @@ import jakarta.json.JsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.*; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; @@ -2483,4 +2486,129 @@ public void testCollectionStorageQuotas() { UtilIT.deleteSetting(SettingsServiceBean.Key.UseStorageQuotas); } + + @Test + public void testIngestWithAndWithoutVariableHeader() throws NoSuchAlgorithmException { + msgt("testIngestWithAndWithoutVariableHeader"); + + // The compact Stata file we'll be using for this test: + // (this file is provided by Stata inc. 
- it's genuine quality) + String pathToFile = "scripts/search/data/tabular/stata13-auto.dta"; + // The pre-calculated MD5 signature of the *complete* tab-delimited + // file as seen by the final Access API user (i.e., with the variable + // header line in it): + String tabularFileMD5 = "f298c2567cc8eb544e36ad83edf6f595"; + // Expected byte sizes of the generated tab-delimited file as stored, + // with and without the header: + int tabularFileSizeWoutHeader = 4026; + int tabularFileSizeWithHeader = 4113; + + String apiToken = createUserGetToken(); + String dataverseAlias = createDataverseGetAlias(apiToken); + Integer datasetIdA = createDatasetGetId(dataverseAlias, apiToken); + + // Before we do anything else, make sure that the instance is configured + // the "old" way, i.e., to store ingested files without the headers: + UtilIT.deleteSetting(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders); + + Response addResponse = UtilIT.uploadFileViaNative(datasetIdA.toString(), pathToFile, apiToken); + addResponse.prettyPrint(); + + addResponse.then().assertThat() + .body("data.files[0].dataFile.contentType", equalTo("application/x-stata-13")) + .body("data.files[0].label", equalTo("stata13-auto.dta")) + .statusCode(OK.getStatusCode()); + + Long fileIdA = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + assertNotNull(fileIdA); + + // Give file time to ingest + assertTrue(UtilIT.sleepForLock(datasetIdA.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile + "(A)"); + + // Check the metadata to confirm that the file has ingested: + + Response fileDataResponse = UtilIT.getFileData(fileIdA.toString(), apiToken); + fileDataResponse.prettyPrint(); + fileDataResponse.then().assertThat() + .body("data.dataFile.filename", equalTo("stata13-auto.tab")) + .body("data.dataFile.contentType", equalTo("text/tab-separated-values")) + .body("data.dataFile.filesize", equalTo(tabularFileSizeWoutHeader)) + .statusCode(OK.getStatusCode()); + + + // Download the file, verify the checksum: + + Response fileDownloadResponse = UtilIT.downloadFile(fileIdA.intValue(), apiToken); + fileDownloadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + byte[] fileDownloadBytes = fileDownloadResponse.body().asByteArray(); + MessageDigest messageDigest = MessageDigest.getInstance("MD5"); + messageDigest.update(fileDownloadBytes); + byte[] rawDigestBytes = messageDigest.digest(); + String tabularFileMD5calculated = FileUtil.checksumDigestToString(rawDigestBytes); + + msgt("md5 of the downloaded file (saved without the variable name header): "+tabularFileMD5calculated); + + assertEquals(tabularFileMD5, tabularFileMD5calculated); + + // Repeat the whole thing, in another dataset (because we will be uploading + // an identical file), but with the "store with the header setting enabled): + + UtilIT.enableSetting(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders); + + Integer datasetIdB = createDatasetGetId(dataverseAlias, apiToken); + + addResponse = UtilIT.uploadFileViaNative(datasetIdB.toString(), pathToFile, apiToken); + addResponse.prettyPrint(); + + addResponse.then().assertThat() + .body("data.files[0].dataFile.contentType", equalTo("application/x-stata-13")) + .body("data.files[0].label", equalTo("stata13-auto.dta")) + .statusCode(OK.getStatusCode()); + + Long fileIdB = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + assertNotNull(fileIdB); 
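The point this test is building toward (and asserts below for both datasets) is that the setting only changes what is physically stored: as delivered to the Access API user, the tab file is byte-identical whether the header line is already present in the stored file or gets prepended on the fly. A hedged sketch of that serving-side idea, not the actual access API implementation (names are illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.io.SequenceInputStream;
    import java.nio.charset.StandardCharsets;

    public class TabDownloadSketch {
        // If the stored file lacks the variable-name header line, prepend the
        // generated one; otherwise serve the stored bytes unchanged.
        public static InputStream serve(boolean storedWithVariableHeader,
                                        String generatedHeaderLine,
                                        InputStream storedTabFileBytes) {
            if (storedWithVariableHeader) {
                return storedTabFileBytes; // header already in the physical file
            }
            byte[] header = (generatedHeaderLine + "\n").getBytes(StandardCharsets.UTF_8);
            return new SequenceInputStream(new ByteArrayInputStream(header), storedTabFileBytes);
        }
    }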
+ + // Give file time to ingest + assertTrue(UtilIT.sleepForLock(datasetIdB.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile + "(B)"); + + // Check the metadata to confirm that the file has ingested: + + fileDataResponse = UtilIT.getFileData(fileIdB.toString(), apiToken); + fileDataResponse.prettyPrint(); + fileDataResponse.then().assertThat() + .body("data.dataFile.filename", equalTo("stata13-auto.tab")) + .body("data.dataFile.contentType", equalTo("text/tab-separated-values")) + .body("data.dataFile.filesize", equalTo(tabularFileSizeWithHeader)) + .statusCode(OK.getStatusCode()); + + + // Download the file, verify the checksum, again + + fileDownloadResponse = UtilIT.downloadFile(fileIdB.intValue(), apiToken); + fileDownloadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + fileDownloadBytes = fileDownloadResponse.body().asByteArray(); + messageDigest.reset(); + messageDigest.update(fileDownloadBytes); + rawDigestBytes = messageDigest.digest(); + tabularFileMD5calculated = FileUtil.checksumDigestToString(rawDigestBytes); + + msgt("md5 of the downloaded file (saved with the variable name header): "+tabularFileMD5calculated); + + assertEquals(tabularFileMD5, tabularFileMD5calculated); + + // In other words, whether the file was saved with, or without the header, + // as downloaded by the user, the end result must be the same in both cases! + // In other words, whether that first line with the variable names is already + // in the physical file, or added by Dataverse on the fly, the downloaded + // content must be identical. + + UtilIT.deleteSetting(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders); + + // @todo: cleanup? + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java index 96e314324ab..ca64bcc794f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java @@ -99,7 +99,7 @@ private DataFile readFileCalcFreq(String fileName, String type ) { TabularDataIngest tabDataIngest = null; try { - tabDataIngest = ingestPlugin.read(fileInputStream, null); + tabDataIngest = ingestPlugin.read(fileInputStream, false, null); } catch (IOException ingestEx) { tabDataIngest = null; System.out.println("Caught an exception trying to ingest file " + fileName + ": " + ingestEx.getLocalizedMessage()); diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java index fc066ef195e..9afb35918a4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java @@ -52,7 +52,7 @@ public void testRead() { try (BufferedInputStream stream = new BufferedInputStream( new FileInputStream(testFile))) { CSVFileReader instance = new CSVFileReader(new CSVFileReaderSpi(), ','); - File outFile = instance.read(stream, null).getTabDelimitedFile(); + File outFile = instance.read(stream, false, null).getTabDelimitedFile(); result = new BufferedReader(new FileReader(outFile)); logger.fine("Final pass: " + outFile.getPath()); } catch (IOException ex) { @@ -104,7 +104,7 @@ public void testVariables() { try 
(BufferedInputStream stream = new BufferedInputStream( new FileInputStream(testFile))) { CSVFileReader instance = new CSVFileReader(new CSVFileReaderSpi(), ','); - result = instance.read(stream, null).getDataTable(); + result = instance.read(stream, false, null).getDataTable(); } catch (IOException ex) { fail("" + ex); } @@ -154,7 +154,7 @@ public void testSubset() { new FileInputStream(testFile))) { CSVFileReader instance = new CSVFileReader(new CSVFileReaderSpi(), ','); - ingestResult = instance.read(stream, null); + ingestResult = instance.read(stream, false, null); generatedTabFile = ingestResult.getTabDelimitedFile(); generatedDataTable = ingestResult.getDataTable(); @@ -195,7 +195,7 @@ public void testSubset() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); assertArrayEquals(floatVectors[vectorCount++], columnVector, "column " + i + ":"); } @@ -229,7 +229,7 @@ public void testSubset() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); assertArrayEquals(longVectors[vectorCount++], columnVector, "column " + i + ":"); } @@ -256,7 +256,7 @@ public void testSubset() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); assertArrayEquals(stringVectors[vectorCount++], columnVector, "column " + i + ":"); } @@ -298,7 +298,7 @@ public void testVariableUNFs() { new FileInputStream(testFile))) { CSVFileReader instance = new CSVFileReader(new CSVFileReaderSpi(), ','); - ingestResult = instance.read(stream, null); + ingestResult = instance.read(stream, false, null); generatedTabFile = ingestResult.getTabDelimitedFile(); generatedDataTable = ingestResult.getDataTable(); @@ -327,7 +327,7 @@ public void testVariableUNFs() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); try { unf = UNFUtil.calculateUNF(columnVector); } catch (IOException | UnfException ioex) { @@ -345,7 +345,7 @@ public void testVariableUNFs() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); try { unf = UNFUtil.calculateUNF(columnVector); @@ -363,7 +363,7 
@@ public void testVariableUNFs() { fail("Failed to open generated tab-delimited file for reading" + ioex); } - String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue()); + String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue(), false); String[] dateFormats = null; @@ -401,7 +401,7 @@ public void testVariableUNFs() { public void testBrokenCSV() { String brokenFile = "src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/BrokenCSV.csv"; try { - new CSVFileReader(new CSVFileReaderSpi(), ',').read(null, null); + new CSVFileReader(new CSVFileReaderSpi(), ',').read(null, false, null); fail("IOException not thrown on null csv"); } catch (NullPointerException ex) { String expMessage = null; @@ -412,7 +412,7 @@ public void testBrokenCSV() { } try (BufferedInputStream stream = new BufferedInputStream( new FileInputStream(brokenFile))) { - new CSVFileReader(new CSVFileReaderSpi(), ',').read(stream, null); + new CSVFileReader(new CSVFileReaderSpi(), ',').read(stream, false, null); fail("IOException was not thrown when collumns do not align."); } catch (IOException ex) { String expMessage = BundleUtil.getStringFromBundle("ingest.csv.recordMismatch", diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java index 113e9be6b54..8af36d6466d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java @@ -16,7 +16,7 @@ public class DTAFileReaderTest { @Test public void testOs() throws IOException { - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("scripts/search/data/tabular/50by1000.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("scripts/search/data/tabular/50by1000.dta"))), false, nullDataFile); assertEquals("application/x-stata", result.getDataTable().getOriginalFileFormat()); assertEquals("rel_8_or_9", result.getDataTable().getOriginalFormatVersion()); assertEquals(50, result.getDataTable().getDataVariables().size()); diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java index c963346b05e..0f14054f472 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java @@ -25,7 +25,7 @@ public void testAuto() throws IOException { instance = new NewDTAFileReader(null, 117); // From https://www.stata-press.com/data/r13/auto.dta // `strings` shows "
    117" - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("scripts/search/data/tabular/stata13-auto.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("scripts/search/data/tabular/stata13-auto.dta"))), false, nullDataFile); assertEquals("application/x-stata", result.getDataTable().getOriginalFileFormat()); assertEquals("STATA 13", result.getDataTable().getOriginalFormatVersion()); assertEquals(12, result.getDataTable().getDataVariables().size()); @@ -39,7 +39,7 @@ public void testAuto() throws IOException { @Test public void testStrl() throws IOException { instance = new NewDTAFileReader(null, 118); - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File(base + "strl.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File(base + "strl.dta"))), false, nullDataFile); DataTable table = result.getDataTable(); assertEquals("application/x-stata", table.getOriginalFileFormat()); assertEquals("STATA 14", table.getOriginalFormatVersion()); @@ -58,7 +58,7 @@ public void testStrl() throws IOException { @Test public void testDates() throws IOException { instance = new NewDTAFileReader(null, 118); - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File(base + "dates.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File(base + "dates.dta"))), false, nullDataFile); DataTable table = result.getDataTable(); assertEquals("application/x-stata", table.getOriginalFileFormat()); assertEquals("STATA 14", table.getOriginalFormatVersion()); @@ -77,7 +77,7 @@ public void testDates() throws IOException { @Test void testNull() { instance = new NewDTAFileReader(null, 117); - assertThrows(IOException.class, () -> instance.read(null, new File(""))); + assertThrows(IOException.class, () -> instance.read(null, false, new File(""))); } // TODO: Can we create a small file to check into the code base that exercises the value-label names non-zero offset issue? 
@@ -87,7 +87,7 @@ public void testFirstCategoryNonZeroOffset() throws IOException { instance = new NewDTAFileReader(null, 117); // https://dataverse.harvard.edu/file.xhtml?fileId=2865667 Stata 13 HouseImputingCivilRightsInfo.dta md5=7dd144f27cdb9f8d1c3f4eb9c4744c42 - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/HouseImputingCivilRightsInfo.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/HouseImputingCivilRightsInfo.dta"))), false, nullDataFile); assertEquals("application/x-stata", result.getDataTable().getOriginalFileFormat()); assertEquals("STATA 13", result.getDataTable().getOriginalFormatVersion()); assertEquals(5, result.getDataTable().getDataVariables().size()); @@ -107,7 +107,7 @@ public void testFirstCategoryNonZeroOffset() throws IOException { public void testFirstCategoryNonZeroOffset1() throws IOException { instance = new NewDTAFileReader(null, 118); // https://dataverse.harvard.edu/file.xhtml?fileId=3140457 Stata 14: 2018_04_06_Aggregated_dataset_v2.dta - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/2018_04_06_Aggregated_dataset_v2.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/2018_04_06_Aggregated_dataset_v2.dta"))), false, nullDataFile); assertEquals("application/x-stata", result.getDataTable().getOriginalFileFormat()); assertEquals("STATA 14", result.getDataTable().getOriginalFormatVersion()); assertEquals(227, result.getDataTable().getDataVariables().size()); @@ -136,7 +136,7 @@ public void test33k() throws IOException { @Test public void testCharacteristics() throws IOException { instance = new NewDTAFileReader(null, 117); - TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/15aa6802ee5-5d2ed1bf55a5.dta"))), nullDataFile); + TabularDataIngest result = instance.read(new BufferedInputStream(new FileInputStream(new File("/tmp/15aa6802ee5-5d2ed1bf55a5.dta"))), false, nullDataFile); assertEquals("application/x-stata", result.getDataTable().getOriginalFileFormat()); assertEquals("STATA 13", result.getDataTable().getOriginalFormatVersion()); assertEquals(441, result.getDataTable().getDataVariables().size()); From 4542b213103ecc18cbf50617696c2997a2a9723d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 7 Feb 2024 16:36:49 -0500 Subject: [PATCH 0636/1112] refactor handleVersion, add includeDeaccession param #10240 --- doc/sphinx-guides/source/api/native-api.rst | 11 ++++-- .../iq/dataverse/api/AbstractApiBean.java | 31 +++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 23 +---------- .../edu/harvard/iq/dataverse/api/Files.java | 38 ++----------------- .../edu/harvard/iq/dataverse/api/FilesIT.java | 11 ++++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 8 ++++ 6 files changed, 60 insertions(+), 62 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index d6f88df3235..5be73c01194 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3529,7 +3529,11 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/files/42/versions/:latest-published/citation" -When the dataset version is a draft or deaccessioned, authentication is required: +When the dataset version is a draft or 
deaccessioned, authentication is required. + +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get an "unauthorized" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``. .. code-block:: bash @@ -3537,14 +3541,15 @@ When the dataset version is a draft or deaccessioned, authentication is required export SERVER_URL=https://demo.dataverse.org export FILE_ID=42 export DATASET_VERSION=:draft + export INCLUDE_DEACCESSIONED=true - curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation?includeDeaccessioned=$INCLUDE_DEACCESSIONED" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/42/versions/:draft/citation + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/42/versions/:draft/citation?includeDeaccessioned=true" If your file has a persistent identifier (PID, such as a DOI), you can pass it using the technique described under :ref:`get-json-rep-of-file`. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index bc94d7f0bcc..3c3e68c4e44 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; @@ -15,6 +16,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean; @@ -390,6 +395,32 @@ protected Dataset findDatasetOrDie(String id) throws WrappedResponse { } } } + + protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { + DatasetVersion dsv = execCommand(handleVersion(versionNumber, new Datasets.DsVersionHandler>() { + + @Override + public Command handleLatest() { + return new
GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); + } + + @Override + public Command handleDraft() { + return new GetDraftDatasetVersionCommand(req, ds); + } + + @Override + public Command handleSpecific(long major, long minor) { + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkPermsWhenDeaccessioned); + } + + @Override + public Command handleLatestPublished() { + return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); + } + })); + return dsv; + } protected DataFile findDataFileOrDie(String id) throws WrappedResponse { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e3505cbbb33..2181f189fc0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2727,28 +2727,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String * Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error. */ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { - DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler>() { - - @Override - public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); - } - - @Override - public Command handleDraft() { - return new GetDraftDatasetVersionCommand(req, ds); - } - - @Override - public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkPermsWhenDeaccessioned); - } - - @Override - public Command handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); - } - })); + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); if (dsv == null || dsv.getId() == null) { throw new WrappedResponse(notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index f7cdf2df10b..69bdebb2dd5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -19,7 +19,6 @@ import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; -import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; @@ -34,11 +33,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.impl.GetDataFileCommand; -import 
edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetDraftFileMetadataIfAvailableCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.RedetectFileTypeCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand; @@ -941,44 +936,19 @@ public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathPar /** * @param fileIdOrPersistentId Database ID or PID of the data file. - * @param dsVersionString The version of the dataset, such as 1.0, :draft, + * @param versionNumber The version of the dataset, such as 1.0, :draft, * :latest-published, etc. + * @param includeDeaccessioned Defaults to false. */ @GET @AuthRequired @Path("{id}/versions/{dsVersionString}/citation") - public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("dsVersionString") String dsVersionString) { + public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("dsVersionString") String versionNumber, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned) { try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); final DataFile df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); Dataset ds = df.getOwner(); - // Adapted from getDatasetVersionOrDie - DatasetVersion dsv = execCommand(handleVersion(dsVersionString, new Datasets.DsVersionHandler>() { - - boolean includeDeaccessioned = true; - boolean checkPermsWhenDeaccessioned = true; - - @Override - public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); - } - - @Override - public Command handleDraft() { - return new GetDraftDatasetVersionCommand(req, ds); - } - - @Override - public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkPermsWhenDeaccessioned); - } - - @Override - public Command handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds); - } - })); - + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, true); if (dsv == null) { return unauthorized(BundleUtil.getStringFromBundle("files.api.no.draftOrUnauth")); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index f30d7e803ee..dbdb12a3e8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2698,9 +2698,14 @@ public void testFileCitationByVersion() throws IOException { deaccessionDataset.prettyPrint(); deaccessionDataset.then().assertThat().statusCode(OK.getStatusCode()); - Response getFileCitationV1PostDeaccessionAuthor = UtilIT.getFileCitation(fileId, "1.0", apiToken); - getFileCitationV1PostDeaccessionAuthor.prettyPrint(); - getFileCitationV1PostDeaccessionAuthor.then().assertThat() + Response getFileCitationV1PostDeaccessionAuthorDefault = 
UtilIT.getFileCitation(fileId, "1.0", apiToken); + getFileCitationV1PostDeaccessionAuthorDefault.prettyPrint(); + getFileCitationV1PostDeaccessionAuthorDefault.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + + Response getFileCitationV1PostDeaccessionAuthorIncludeDeaccessioned = UtilIT.getFileCitation(fileId, "1.0", true, apiToken); + getFileCitationV1PostDeaccessionAuthorIncludeDeaccessioned.prettyPrint(); + getFileCitationV1PostDeaccessionAuthorIncludeDeaccessioned.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.message", equalTo("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", " + pidAsUrl + ", Root, V1, DEACCESSIONED VERSION; coffeeshop.png [fileName]")); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index b51d6af75a9..f307275af1f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3475,10 +3475,18 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, boo } static Response getFileCitation(Integer fileId, String datasetVersion, String apiToken) { + Boolean includeDeaccessioned = null; + return getFileCitation(fileId, datasetVersion, includeDeaccessioned, apiToken); + } + + static Response getFileCitation(Integer fileId, String datasetVersion, Boolean includeDeaccessioned, String apiToken) { RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); } + if (includeDeaccessioned != null) { + requestSpecification.queryParam("includeDeaccessioned", includeDeaccessioned); + } return requestSpecification.get("/api/files/" + fileId + "/versions/" + datasetVersion + "/citation"); } From 397dbfb7eb68848650ca8a233462b91532a92970 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 8 Feb 2024 10:41:11 +0000 Subject: [PATCH 0637/1112] Changed: getFileData endpoint using new commands through DsVersionHandler --- .../edu/harvard/iq/dataverse/api/Files.java | 77 ++++++++----------- ...etDraftFileMetadataIfAvailableCommand.java | 1 - ...etLatestAccessibleFileMetadataCommand.java | 3 +- 3 files changed, 32 insertions(+), 49 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 95117162094..be2f093fdcf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -25,15 +25,11 @@ import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; import edu.harvard.iq.dataverse.datasetutility.NoFilesException; import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.engine.command.impl.GetDataFileCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDraftFileMetadataIfAvailableCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RedetectFileTypeCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import 
edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.export.ExportService; import io.gdcc.spi.export.ExportException; import edu.harvard.iq.dataverse.externaltools.ExternalTool; @@ -49,7 +45,8 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.URLTokenUtil; -import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; +import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -503,70 +500,58 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa @AuthRequired @Path("{id}") public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, null); + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, uriInfo, headers, DS_VERSION_LATEST), getRequestUser(crc)); } @GET @AuthRequired @Path("{id}/versions/{datasetVersionId}") public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, datasetVersionId); + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, uriInfo, headers, datasetVersionId), getRequestUser(crc)); } - private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, String datasetVersionId){ - - DataverseRequest req; - try { - req = createDataverseRequest(user); - } catch (Exception e) { - return error(BAD_REQUEST, "Error attempting to request information. 
Maybe a bad API token?"); - } - final DataFile df; + private Response getFileDataResponse(final DataverseRequest req, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, String datasetVersionId) throws WrappedResponse { + final DataFile dataFile; try { - df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); + dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); } catch (Exception e) { return error(BAD_REQUEST, "Error attempting get the requested data file."); } - FileMetadata fm; - - if (datasetVersionId != null && datasetVersionId.equals(DS_VERSION_DRAFT)) { - try { - fm = execCommand(new GetDraftFileMetadataIfAvailableCommand(req, df)); - } catch (WrappedResponse w) { - return error(BAD_REQUEST, "An error occurred getting a draft version, you may not have permission to access unpublished data on this dataset."); + FileMetadata fileMetadata = execCommand(handleVersion(datasetVersionId, new Datasets.DsVersionHandler<>() { + @Override + public Command handleLatest() { + return new GetLatestAccessibleFileMetadataCommand(req, dataFile); } - if (null == fm) { - return error(BAD_REQUEST, BundleUtil.getStringFromBundle("files.api.no.draft")); + + @Override + public Command handleDraft() { + return new GetDraftFileMetadataIfAvailableCommand(req, dataFile); } - } else { - //first get latest published - //if not available get draft if permissible - try { - fm = df.getLatestPublishedFileMetadata(); + @Override + public Command handleSpecific(long major, long minor) { + return new GetSpecificPublishedFileMetadataByDatasetVersionCommand(req, dataFile, major, minor); + } - } catch (UnsupportedOperationException e) { - try { - fm = execCommand(new GetDraftFileMetadataIfAvailableCommand(req, df)); - } catch (WrappedResponse w) { - return error(BAD_REQUEST, "An error occurred getting a draft version, you may not have permission to access unpublished data on this dataset."); - } - if (null == fm) { - return error(BAD_REQUEST, BundleUtil.getStringFromBundle("files.api.no.draft")); - } + @Override + public Command handleLatestPublished() { + return new GetLatestPublishedFileMetadataCommand(req, dataFile); } + })); + if (fileMetadata == null) { + throw new WrappedResponse(notFound("FileMetadata for DataFile with id " + fileIdOrPersistentId + " in dataset version " + datasetVersionId + " not found")); } - if (fm.getDatasetVersion().isReleased()) { - MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, df); + if (fileMetadata.getDatasetVersion().isReleased()) { + MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, dataFile); mdcLogService.logEntry(entry); } return Response.ok(Json.createObjectBuilder() - .add("status", ApiConstants.STATUS_OK) - .add("data", json(fm)).build()) + .add("status", ApiConstants.STATUS_OK) + .add("data", json(fileMetadata)).build()) .type(MediaType.APPLICATION_JSON) .build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java index 4673f45412a..e0f8ca1fcf8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java index 306221ed86c..980563a5489 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java @@ -2,14 +2,13 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -@RequiredPermissions(Permission.ViewUnpublishedDataset) +@RequiredPermissions({}) public class GetLatestAccessibleFileMetadataCommand extends AbstractCommand { private final DataFile dataFile; From 65de2a532956f690aad73b1784e9ef1d0f55a3e3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 8 Feb 2024 10:49:21 +0000 Subject: [PATCH 0638/1112] Refactor: using Bundle string in response --- .../edu/harvard/iq/dataverse/api/Files.java | 17 ++--------------- src/main/java/propertyFiles/Bundle.properties | 1 + 2 files changed, 3 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index be2f093fdcf..a8e6aa74a42 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -2,20 +2,7 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetLock; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; -import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; @@ -541,7 +528,7 @@ public Command handleLatestPublished() { })); if (fileMetadata == null) { - throw new WrappedResponse(notFound("FileMetadata for DataFile with id " + fileIdOrPersistentId + " in dataset version " + datasetVersionId + " not found")); + throw 
new WrappedResponse(notFound(BundleUtil.getStringFromBundle("files.api.notFoundInVersion", Arrays.asList(fileIdOrPersistentId, datasetVersionId)))); } if (fileMetadata.getDatasetVersion().isReleased()) { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 157f2ecaf54..4ef78c8fe7f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2633,6 +2633,7 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file +files.api.notFoundInVersion="File metadata for file with id {0} in dataset version {1} not found" files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java From 153d7d38ef46827a2e4d7651eec3de7b3ee2c1b7 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 8 Feb 2024 11:10:17 +0000 Subject: [PATCH 0639/1112] Changed: FilesIT testGetFileInfo restructure for upcoming new tests --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 33 ++++++++----------- 1 file changed, 14 insertions(+), 19 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 915f82a6de2..feeeb40c133 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1036,7 +1036,7 @@ public void testRestrictFile() { } - @Test + @Test public void testRestrictAddedFile() { msgt("testRestrictAddedFile"); @@ -1141,9 +1141,6 @@ public void testAccessFacet() { UtilIT.setSetting(SettingsServiceBean.Key.PublicInstall, "false"); } - - - @Test public void test_AddFileBadUploadFormat() { @@ -1398,14 +1395,13 @@ public void testDataSizeInDataverse() throws InterruptedException { assertEquals(magicControlString, JsonPath.from(datasetDownloadSizeResponse.body().asString()).getString("data.message")); } - + @Test public void testGetFileInfo() { - Response createUser = UtilIT.createRandomUser(); String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); - Response makeSuperUser = UtilIT.makeSuperUser(username); + UtilIT.makeSuperUser(username); String dataverseAlias = createDataverseGetAlias(apiToken); Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); @@ -1416,29 +1412,23 @@ public void testGetFileInfo() { String pathToFile = "scripts/search/data/binary/trees.png"; Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + // The following tests cover cases where no version ID is specified in the endpoint + // Superuser should get to see draft file data String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - msgt("datafile id: " + dataFileId); - - addResponse.prettyPrint(); - Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); - - getFileDataResponse.prettyPrint(); getFileDataResponse.then().assertThat() .body("data.label", equalTo("trees.png")) .body("data.dataFile.filename", equalTo("trees.png")) .body("data.dataFile.contentType", equalTo("image/png")) .body("data.dataFile.filesize", equalTo(8361)) .statusCode(OK.getStatusCode()); - + + // Regular user should not get to see draft file data getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); getFileDataResponse.then().assertThat() 
.statusCode(BAD_REQUEST.getStatusCode()); - // ------------------------- // Publish dataverse and dataset - // ------------------------- - msg("Publish dataverse and dataset"); Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); publishDataversetResp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -1446,12 +1436,17 @@ public void testGetFileInfo() { Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResp.then().assertThat() .statusCode(OK.getStatusCode()); - //regular user should get to see file data + + // Regular user should get to see published file data getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()); - //cleanup + // The following tests cover cases where a version ID is specified in the endpoint + + // TODO + + // Cleanup Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); From 2fb5247386aacc53742b1d3657e68b3c5df7d420 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 8 Feb 2024 11:39:36 +0000 Subject: [PATCH 0640/1112] Changed: UtilIT getFileData to support new datasetVersionId optional param --- .../edu/harvard/iq/dataverse/api/UtilIT.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ec41248a65f..c2d43584b22 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1084,11 +1084,17 @@ static Response getFileMetadata(String fileIdOrPersistentId, String optionalForm .urlEncodingEnabled(false) .get("/api/access/datafile/" + idInPath + "/metadata" + optionalFormatInPath + optionalQueryParam); } - - static Response getFileData(String fileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/files/" + fileId ); + + static Response getFileData(String fileId, String apiToken) { + return getFileData(fileId, apiToken, null); + } + + static Response getFileData(String fileId, String apiToken, String datasetVersionId) { + RequestSpecification requestSpec = given().header(API_TOKEN_HTTP_HEADER, apiToken); + if (datasetVersionId != null) { + requestSpec.queryParam("datasetVersionId", datasetVersionId); + } + return requestSpec.get("/api/files/" + fileId); } static Response testIngest(String fileName, String fileType) { From e98ea11a75ca9b808085ca861d86047cc200b77a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 8 Feb 2024 13:34:54 +0000 Subject: [PATCH 0641/1112] Changed: do not overwrite findDataFileOrDie or GetDataFileCommand with bad request --- .../iq/dataverse/api/AbstractApiBean.java | 1 - .../edu/harvard/iq/dataverse/api/Files.java | 8 +-- .../command/impl/GetDataFileCommand.java | 17 +++--- .../edu/harvard/iq/dataverse/api/FilesIT.java | 54 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 12 ++--- 5 files changed, 58 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index bc94d7f0bcc..fe9ee518d75 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -392,7 +392,6 @@ protected Dataset 
findDatasetOrDie(String id) throws WrappedResponse { } protected DataFile findDataFileOrDie(String id) throws WrappedResponse { - DataFile datafile; if (id.equals(PERSISTENT_ID_KEY)) { String persistentId = getRequestParameter(PERSISTENT_ID_KEY.substring(1)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index a8e6aa74a42..4116bf18973 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -498,13 +498,7 @@ public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id } private Response getFileDataResponse(final DataverseRequest req, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, String datasetVersionId) throws WrappedResponse { - final DataFile dataFile; - try { - dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); - } catch (Exception e) { - return error(BAD_REQUEST, "Error attempting get the requested data file."); - } - + final DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); FileMetadata fileMetadata = execCommand(handleVersion(datasetVersionId, new Datasets.DsVersionHandler<>() { @Override public Command handleLatest() { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataFileCommand.java index fdf47bbd2dd..369f3cbfda6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataFileCommand.java @@ -11,35 +11,34 @@ import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + import java.util.Collections; import java.util.Map; import java.util.Set; /** - * * @author Matthew */ // no annotations here, since permissions are dynamically decided // based off GetDatasetCommand for similar permissions checking public class GetDataFileCommand extends AbstractCommand { - private final DataFile df; + private final DataFile dataFile; - public GetDataFileCommand(DataverseRequest aRequest, DataFile anAffectedDataset) { - super(aRequest, anAffectedDataset); - df = anAffectedDataset; + public GetDataFileCommand(DataverseRequest aRequest, DataFile dataFile) { + super(aRequest, dataFile); + this.dataFile = dataFile; } @Override public DataFile execute(CommandContext ctxt) throws CommandException { - return df; + return dataFile; } @Override public Map> getRequiredPermissions() { return Collections.singletonMap("", - df.isReleased() ? Collections.emptySet() - : Collections.singleton(Permission.ViewUnpublishedDataset)); + dataFile.isReleased() ? 
Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataset)); } - } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index feeeb40c133..d84b0ed77ac 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -12,6 +12,7 @@ import io.restassured.path.json.JsonPath; import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED; import static io.restassured.path.json.JsonPath.with; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1400,22 +1401,22 @@ public void testDataSizeInDataverse() throws InterruptedException { public void testGetFileInfo() { Response createUser = UtilIT.createRandomUser(); String username = UtilIT.getUsernameFromResponse(createUser); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String superUserApiToken = UtilIT.getApiTokenFromResponse(createUser); UtilIT.makeSuperUser(username); - String dataverseAlias = createDataverseGetAlias(apiToken); - Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + String dataverseAlias = createDataverseGetAlias(superUserApiToken); + Integer datasetId = createDatasetGetId(dataverseAlias, superUserApiToken); createUser = UtilIT.createRandomUser(); String apiTokenRegular = UtilIT.getApiTokenFromResponse(createUser); msg("Add a non-tabular file"); String pathToFile = "scripts/search/data/binary/trees.png"; - Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, superUserApiToken); // The following tests cover cases where no version ID is specified in the endpoint // Superuser should get to see draft file data String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); + Response getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken); getFileDataResponse.then().assertThat() .body("data.label", equalTo("trees.png")) .body("data.dataFile.filename", equalTo("trees.png")) @@ -1426,14 +1427,14 @@ public void testGetFileInfo() { // Regular user should not get to see draft file data getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); getFileDataResponse.then().assertThat() - .statusCode(BAD_REQUEST.getStatusCode()); + .statusCode(UNAUTHORIZED.getStatusCode()); // Publish dataverse and dataset - Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); - publishDataversetResp.then().assertThat() + Response publishDataverseResp = UtilIT.publishDataverseViaSword(dataverseAlias, superUserApiToken); + publishDataverseResp.then().assertThat() .statusCode(OK.getStatusCode()); - Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", superUserApiToken); publishDatasetResp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -1443,14 +1444,45 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // The following tests cover cases where a version ID is specified in the endpoint + // Superuser should not get to see draft file data when no draft version exists + 
getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_DRAFT); + getFileDataResponse.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + // Update the file metadata + String newFileName = "trees_2.png"; + JsonObjectBuilder updateFileMetadata = Json.createObjectBuilder() + .add("label", newFileName); + Response updateFileMetadataResponse = UtilIT.updateFileMetadata(dataFileId, updateFileMetadata.build().toString(), superUserApiToken); + updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); + // Superuser should get to see draft file data + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_DRAFT); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Regular user should not get to see draft file data + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_DRAFT); + getFileDataResponse.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + + // Publish dataset once again + publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", superUserApiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Regular user should get to see latest published file data + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_LATEST_PUBLISHED); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileName)); // TODO // Cleanup - Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, superUserApiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); - Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, superUserApiToken); assertEquals(200, deleteDataverseResponse.getStatusCode()); Response deleteUserResponse = UtilIT.deleteUser(username); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index c2d43584b22..f6f2c9a3c03 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1086,15 +1086,15 @@ static Response getFileMetadata(String fileIdOrPersistentId, String optionalForm } static Response getFileData(String fileId, String apiToken) { - return getFileData(fileId, apiToken, null); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + fileId); } static Response getFileData(String fileId, String apiToken, String datasetVersionId) { - RequestSpecification requestSpec = given().header(API_TOKEN_HTTP_HEADER, apiToken); - if (datasetVersionId != null) { - requestSpec.queryParam("datasetVersionId", datasetVersionId); - } - return requestSpec.get("/api/files/" + fileId); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + fileId + "/versions/" + datasetVersionId); } static Response testIngest(String fileName, String fileType) { From 8abeaf06ce1c24a5b0cc7c17954c307727746703 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 8 Feb 2024 09:06:57 -0500 Subject: [PATCH 0642/1112] #10286 add breadcrumbs to dataset api --- .../harvard/iq/dataverse/api/Datasets.java | 5 +- .../iq/dataverse/util/json/JsonPrinter.java | 49 ++++++++++++++++++- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e3505cbbb33..60c07815b71 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -186,11 +186,12 @@ public interface DsVersionHandler { @GET @AuthRequired @Path("{id}") - public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { + public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("breadcrumbs") Boolean breadcrumbs) { return response( req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved)); - final JsonObjectBuilder jsonbuilder = json(retrieved); + Boolean includeBreadcrumbs = breadcrumbs == null ? false : breadcrumbs; + final JsonObjectBuilder jsonbuilder = json(retrieved, includeBreadcrumbs); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) if((latest != null) && latest.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 2eaf6b64579..197c46ac474 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -55,6 +55,7 @@ import jakarta.ejb.Singleton; import jakarta.json.JsonArray; import jakarta.json.JsonObject; +import java.math.BigDecimal; /** * Convert objects to Json. 
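The JsonPrinter hunks that follow add getBreadcrumbsFromDvObject and attach its result as an ownerArray property whenever the new breadcrumbs query parameter on the dataset endpoint above is true, walking from the dataset itself up through its owning collections to the root. A rough sketch of the array shape that builder would produce; the identifier, alias, and display names below are hypothetical, and it assumes a jakarta.json implementation (e.g. Eclipse Parsson) on the classpath:

    import jakarta.json.Json;
    import jakarta.json.JsonArray;

    public class BreadcrumbShapeSketch {
        public static void main(String[] args) {
            // Hypothetical owner chain: dataset -> "socialscience" collection -> root collection
            JsonArray ownerArray = Json.createArrayBuilder()
                    .add(Json.createObjectBuilder()
                            .add("type", "DATASET")
                            .add("identifier", "FK2/ABC123")    // dataset identifier
                            .add("displayName", "Darwin's Finches"))
                    .add(Json.createObjectBuilder()
                            .add("type", "DATAVERSE")
                            .add("identifier", "socialscience") // dataverse alias
                            .add("displayName", "Social Science"))
                    .add(Json.createObjectBuilder()
                            .add("type", "DATAVERSE")
                            .add("identifier", "root")
                            .add("displayName", "Root"))
                    .build();
            System.out.println(ownerArray);
        }
    }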
@@ -303,6 +304,45 @@ public static JsonArrayBuilder json(List dataverseContacts) { } return jsonArrayOfContacts; } + + public static JsonArrayBuilder getBreadcrumbsFromDvObject(DvObject dvObject) { + + List ownerList = new ArrayList(); + + while (dvObject != null) { + ownerList.add(dvObject); + dvObject = dvObject.getOwner(); + } + + JsonArrayBuilder jsonArrayOfBreadcrumbs = Json.createArrayBuilder(); + + for (DvObject dvo : ownerList){ + JsonObjectBuilder breadcrumbObject = jsonObjectBuilder(); + if (dvo.isInstanceofDataverse()){ + Dataverse in = (Dataverse) dvo; + breadcrumbObject.add("identifier", in.getAlias()); + } + if (dvo.isInstanceofDataset() || dvo.isInstanceofDataFile() ){ + if (dvo.getIdentifier() != null){ + breadcrumbObject.add("identifier", dvo.getIdentifier()); + } else { + breadcrumbObject.add("identifier", dvo.getId()); + } + } + if (dvo.isInstanceofDataverse()){ + breadcrumbObject.add("type", "DATAVERSE"); + } + if (dvo.isInstanceofDataset()){ + breadcrumbObject.add("type", "DATASET"); + } + if (dvo.isInstanceofDataFile()){ + breadcrumbObject.add("type", "DATAFILE"); + } + breadcrumbObject.add("displayName", dvo.getDisplayName()); + jsonArrayOfBreadcrumbs.add(breadcrumbObject); + } + return jsonArrayOfBreadcrumbs; + } public static JsonObjectBuilder json( DataverseTheme theme ) { final NullSafeJsonBuilder baseObject = jsonObjectBuilder() @@ -326,8 +366,12 @@ public static JsonObjectBuilder json(BuiltinUser user) { .add("id", user.getId()) .add("userName", user.getUserName()); } + + public static JsonObjectBuilder json(Dataset ds){ + return json(ds, false); + } - public static JsonObjectBuilder json(Dataset ds) { + public static JsonObjectBuilder json(Dataset ds, Boolean includeBreadcrumbs) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", ds.getId()) .add("identifier", ds.getIdentifier()) @@ -340,6 +384,9 @@ public static JsonObjectBuilder json(Dataset ds) { if (DvObjectContainer.isMetadataLanguageSet(ds.getMetadataLanguage())) { bld.add("metadataLanguage", ds.getMetadataLanguage()); } + if (includeBreadcrumbs){ + bld.add("ownerArray", getBreadcrumbsFromDvObject(ds)); + } return bld; } From 572b9cbbd0264b068bb324c69a6d2a2a06f6337e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 8 Feb 2024 09:30:19 -0500 Subject: [PATCH 0643/1112] #10286 move owner type to beginning --- .../iq/dataverse/util/json/JsonPrinter.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 197c46ac474..0803001fbfd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -318,6 +318,15 @@ public static JsonArrayBuilder getBreadcrumbsFromDvObject(DvObject dvObject) { for (DvObject dvo : ownerList){ JsonObjectBuilder breadcrumbObject = jsonObjectBuilder(); + if (dvo.isInstanceofDataverse()){ + breadcrumbObject.add("type", "DATAVERSE"); + } + if (dvo.isInstanceofDataset()){ + breadcrumbObject.add("type", "DATASET"); + } + if (dvo.isInstanceofDataFile()){ + breadcrumbObject.add("type", "DATAFILE"); + } if (dvo.isInstanceofDataverse()){ Dataverse in = (Dataverse) dvo; breadcrumbObject.add("identifier", in.getAlias()); @@ -329,15 +338,6 @@ public static JsonArrayBuilder getBreadcrumbsFromDvObject(DvObject dvObject) { breadcrumbObject.add("identifier", dvo.getId()); } } - if (dvo.isInstanceofDataverse()){ - 
breadcrumbObject.add("type", "DATAVERSE"); - } - if (dvo.isInstanceofDataset()){ - breadcrumbObject.add("type", "DATASET"); - } - if (dvo.isInstanceofDataFile()){ - breadcrumbObject.add("type", "DATAFILE"); - } breadcrumbObject.add("displayName", dvo.getDisplayName()); jsonArrayOfBreadcrumbs.add(breadcrumbObject); } From d9d0d3b282d0b51697c81e1f0848788819e2d052 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 8 Feb 2024 11:13:23 -0500 Subject: [PATCH 0644/1112] copied changes from original pr --- .../edu/harvard/iq/dataverse/util/FileUtil.java | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 8decf74fe13..6dbcb93358e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -501,24 +501,15 @@ public static String determineFileType(File f, String fileName) throws IOExcepti if ("application/x-gzip".equals(fileType)) { logger.fine("we'll run additional checks on this gzipped file."); - // We want to be able to support gzipped FITS files, same way as - // if they were just regular FITS files: - FileInputStream gzippedIn = new FileInputStream(f); - // (new FileInputStream() can throw a "filen not found" exception; - // however, if we've made it this far, it really means that the - // file does exist and can be opened) - InputStream uncompressedIn = null; - try { - uncompressedIn = new GZIPInputStream(gzippedIn); + try (FileInputStream gzippedIn = new FileInputStream(f); + InputStream uncompressedIn = new GZIPInputStream(gzippedIn)) { if (isFITSFile(uncompressedIn)) { fileType = "application/fits-gzipped"; } } catch (IOException ioex) { - if (uncompressedIn != null) { - try {uncompressedIn.close();} catch (IOException e) {} - } + logger.warning("IOException while processing gzipped FITS file: " + ioex.getMessage()); } - } + } if ("application/zip".equals(fileType)) { // Is this a zipped Shapefile? 
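To exercise this detection end to end, one could gzip a FITS file and upload it through the native add-file API; the file name, DOI, and server values below are placeholders, and the contentType reported back for such a file should be application/fits-gzipped. The unit test added in the next patch covers the same path directly against FileUtil.determineFileType().

    gzip -k FOSy19g0309t_c2f.fits
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@FOSy19g0309t_c2f.fits.gz" \
      "$SERVER_URL/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/EXAMPLE"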
From a21f44fcc0dfb3a6ba3fa09e5120bbe9c782e5c9 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 8 Feb 2024 12:05:24 -0500 Subject: [PATCH 0645/1112] adding unit test --- .../harvard/iq/dataverse/util/FileUtilTest.java | 9 +++++++++ .../resources/fits/FOSy19g0309t_c2f.fits.gz | Bin 0 -> 19935 bytes 2 files changed, 9 insertions(+) create mode 100644 src/test/resources/fits/FOSy19g0309t_c2f.fits.gz diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index 2cfe9f25d7e..a459b39c7db 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -371,4 +371,13 @@ public void testHdf4File() throws IOException { assertEquals("application/octet-stream", contentType); } + @Test + public void testGZipFile() throws IOException { + String path = "src/test/resources/fits/"; + String pathAndFile = path + "FOSy19g0309t_c2f.fits.gz"; + File file = new File(pathAndFile); + String contentType = FileUtil.determineFileType(file, pathAndFile); + assertEquals("application/fits-gzipped", contentType); + } + } diff --git a/src/test/resources/fits/FOSy19g0309t_c2f.fits.gz b/src/test/resources/fits/FOSy19g0309t_c2f.fits.gz new file mode 100644 index 0000000000000000000000000000000000000000..15ee3d5d32318eaf7906423312b0f4f1be4cbf03 GIT binary patch literal 19935 zcmV()K;OR~iwFp6l|p3z14d6%c`-R>Ff%YYbYEjKW-exFbaMdgns-3W?f?HvOR_?? z9OL=QN}YzvPD`{j?3Fs{(9%Hm%!-WC(A1JL648(%MM+vzC{m%cG?e*!pM(46zH#68 zXMDfEKfbR&8t1&v&(*C($ZpC+1Q&qbM02L9PGJv&MZ3@+Z7x~ zr|&*wpgv#Mg!L2scsv9ChgkMjEc5joryo7_XCC`UJO%j)N(}EG3ZJL&3;q6~Xw5g( zGSK|RdYD>s962mob63_14$JB5;&9eGbL^bB_I7`q0Yf7b-Gx71Use8ke14yqK6^** zO0J!`4U21QzLN7Z{S1wDb#x7QJQ)*HSxt3K6PC<3Z_BZC?VOys&MwZ3E-Ytj##X*? zojI1w|C}#o#I^bI4ZMFf)lIb+9$P_9UPVq#>3g0g^T}^Mw9ed#W#P!-<9;OCx_Vj$ z>iRsMD_d=)yrR6C^D>LI&f_f_~C(P9kCnqzLsSPvg4iff0|Vt!Jois%xP0y?(#(EX|$Gon0M%I&+_e8LWxEj)B=v?L|k!(9FQ}Z!ZXM z1$bj-^jpAI)8UQRbi9Uww#--k{-N-A8hYv`3{OK{kN?v5wwjs} zOI~53lF~#)HJN`()G(f_uJ^nDvd7DREB{>v>}3q@KgMI`PFPA||zl~Q_Vf+L|Wfetvwz9&6 ze?>nn4L0d5lZ2Cyf7h{A~em{`fo@!x^*qFa2BN z)tb*JXlDF%eHGc`6_nH_sH(B$71%0@|C~2}nW46}iI(YqD!wfE5A*)(!em@yLo*}B zkc^qK!BorGWFpJZ#N>Zlt>5HF`WYE&8vXyGp&Uz_@fcoOVPdz5hOw@Zsi84zw$_4q zhQ^wIt)#AjiK(#}zh2We{Nv-i>VV7Cla4O79Hu|u+Ob@m{-VAzHZ(N-Qvdx{4}HgT zw6}NWS9(l^#j8i{S2%GT*P8R|RVU7$S{q(%^`-t}LjM=ax1;Bmj(=VEeU%wz2D&_j z_&gmGISu_WV-(~}yGBXER6kW8E6^nYOpj64dycD zr)y~N_m`J7Q%hZwDGz?$&p26dId&GDua&qB*N($<%>&sci zv2bRYJ32Bg(l0+#Q$B9V=;BU?hIq5wP6-kTTflbgr&}Opt=T{x*Fs;+Ee>G%HCd6ql-^%dR z^=D`qfA=PjXUTD9*4N(gOEj3sGUxUG<5(+fTzaA*E|Cr&*Xwfk*9=L_@K5zvqqk~GwRLMNAM%vG({DY?$(iHu7Xf;KwvMj8k=Bp(E7$Rk^47fhTfxNVFEQ)e&7q%(wjTR?p1Bp9*Sax0 zHlO#|Kdfxbon@`K9GkzQYU?R{$Fq|&|NTuK&xvDU&*=7dfx2_Q*Uv)Hg{`Q>R$%l~ z_(ngj9nVcT4Bd_wUeo+`0hUY5(UB3JY;19y^*x&KYSIMg=Dwy+l?C*JB6WcGr z3{T-Z-e>)+csvFE{HZx>vgO%owmhEF@AUyQK1bKTGldLK`3L%~ z;_;Nfj@QYWYxOrf1%@}_JN+zHv*neQCh&L@zWSRD$8M#wHDfItD=RLq{{1i0ZFwvxOG&z@aX`SZtEGuMK*{r^p* zwqSlbvmBWy@ju=#eV^Y<=INSh%=~FK`?F{mm@d;WH0I@V4MW~hoT=9L{pIJOIop3T zM&?g`bmdLwAH`5x`zy~-U)S{eH-EGqe<4Oj8o$=>XYu7H!sOpy^S)VCCzln@j^@8s zQ%7I>*Syd6#)MjqBfpRPhW{hc*87X~_&xFR4le&vOlB|m3;lTh`!@#qMdqo`{Wb3! 
z|NT7ll^i>cqxsL++IsWG%{}$=9Xc-&-g}pf1JD9KJpWl5hRW0mo9bBA$hGlxo zU)%E!`%6oX1INyiw`auIrVD>!|EB_j9Lq@E)Kts(pKOwRBf18r-|RpCLB8*gxOUE* zm7mw+=Yv}?L)}E{M?C)4Hp6peJmRl5)xPB!Gwtfn?hDBCd5%o0`nQ1#KDS#x=CQvU zuj}9Hw@i%GHMD-wPrH6rZw!ln`tABxNE1C>4XwYYpEHxySxz=wi(e%n1I9XdzkkI0 zL;vew?#z@1cAvM39GRNW-sR7`P6ozW>Y6{{{lQ+oAU^YKIkxsp1!7^tG5@Y7Hqh75 z(^5A!`5WW?7V#NsZn4IN`=yHeBK-Abe|P@zyZl~1^Vi{X(aYOQoycMy9>)uw<5)8^ zW{+bTnCX4P_74xxXu>VtQ8#RbsUb}mk#H9S@3h7sv>WHW5J(!|EuzSef5uH zs-Es#t#8gle-}lb@cIu63m01#CaE*2$ezg>pWT=>insYbMPgW@eSaR$ zi+>7A6BLx#Dop%SD_nT~YiG}O;(XbA|J-=la(}2N?{sjPsrn4Y-@a>o`SM$RYHwxb#M>kv z#c1m+>%ev3@H;uCc3^_56R+^Hx3h8OZDHx_8Zaf1)*tj!Ql6kTo-NOdf3{5X%$vV& z>Y`E1(4Cwe?QNa@ya~pv#}9ZDRM{%ymDwuatjFKv{ntf~Wn#+shKZ>z@4n7I_7C3O z9NuqcJroq=`1NT3>+?~kwl+^c`9HnM(@%cNC_8(`sx2M(8rqB=$6`+z_51(4 zhS7TCQXT<064!Hls`|g9EGdgROHD~?*f~oLD zj(;R#YG$m(%&mzoKdSx%`EuWfJ%txp*s2P@U-tPDU--wDEORG|&-W3(9<%-yX82L% zcb=w}#_v2uwQnx_FgykRd5?t+@A}~1VEvDX+;3Mfm|9)i&{$vnA5Edw95Y=5L*D%> znOO>dG#LK=JySy2*{}aPRtqM`IPnjFEbQ$aE%_HISx&ri#FqaN6Eg#K-Zh$U;yqhI zL7B}~`m!EOY+~H@Yf0ebV#nXNn#*P}am?w@GpKK9$OnGQW2>>{71e&ov$ePXHIUJ7 znYy0oxA{m(nXSwazpSsh4ez?BZ5l$c{`~3Jgodw2*CgaQuW*ZpJGx*ON4=s|j18Gc!>FvQ`L{pL`|y|hF9tdqN=%b4tD$FTrpezlm-({)!qlFQjAMPN z^Xz_g34w|CJS6L%#`|w;NY*HRsN*j&?|vr__)p^f&shHr`8?;m^Zh@Hl7a%S9yii5 z`Z9;So7ucQCY~+-)J~Ykm+=w?@BXuj($~F+FK^0x;r(XQ-|)D07B(*YJw_)cCGfW{ zd7{UJG6OSSJ^q#V&6mmiQ9pjw{L6!H6RkNsB#-y!H^1_}#V~8aG>FU#mftJ(JCA36 zzvg`g$hq*(2fr|WX@≠Rmb#lmC<1 z)oAa?-4rl(1O*1Zq2Podvgl_^+hdAI@m)CiNgL5#jmfldjX&wFE2GIr1xPe=1r6Q> z5^F0Zg9&alUU?xYobFAWlJ(?q?h>v4*oQV9*g}&eU(sf6K20>aOS^_VB;k22G;mrS zjnyAUx(Un3;*u^+623(Jmll$lNE2;0v81IZSCLjv1)4lNjV8N!lgP09WGl3j*iR~n zeRDj`?LC-;J+_d|nlR!V?M_SQrjT~$f#lA)OIMTPDDvDLI@BkXHrmJ0lGSI)PO^qL z`!r}m@gve5f1OtKttPJjaB|C9L=Hi=`?$h_w<(!C^4^AuD`ZSiKBHCB#BU9zPadMPw&wFZqk z+(EP2l1XaQ22!j#OcKoxNTOmJvGOcXA*WB$UKONnvWa@@Tp*b{ThO5uL(_c~NJSxq zrrozDI@3UEtry66`Zn62IGCp45a|z+C#|#xq|#$J>8u$W*r@w^p5uG z#nL*j7UH_RpgFa%WImyiY%Q;oaY`^PeN#)@7hj_3D`Uwi=Q$lZKZ$mQJSM9>{&Z+! 
z2+8+;ODoiRkcBjdoSd{s^QAX!kk+Hc`;}K6e*tENxP2d)A85Zbi7)SJd0M* zf!sjaH*+%`_wl2!W;2S7oIPG?_dk$a#LEgv_J_V#{2hc|G@(_Vs(>Pyq% zk(Cq_-cIZLJ*VB^(!o$UIu^E;_9^b6ZBs4CQFc7p4w_BgLcX+vB~AM;^(Nza9kjvY z6}gSvNp2D@w8c-BT#Jm!HL@RVhzleAwN1ngQlX8mwX`^a=zxP9IS*1JEr*e0xpNGe zn)Igm+=;Y7_X4R1B-0wR>9nR`Jem2l(Y6dOEmE9EhHJ)?9ov-_E+<-cU?>^thm!S2 zSK8_~j0{RnlF8c3WO8XHjalnT`VTy4#Im(Cx#vt$e|(dAf9Q)h1{cXRWiA;sMv}&2 zB{KKkLA{vRA^YYk%{sA-W{F8tucrnyuw@|iUcHMn6|a)`>k?Af-JfJTou#SXfush)MIf^>fiE#y5I1jG07Kc?CJ@`Ns*v_1uT-gK7yt^ z=t2^P!KB^Om*fYVdO){Y==4qt#Vi*Z6Rz$mO0rjv- zB>C$Pso$zZH1wE*lwqq$pq~YaDdp18>lcx-`U4sYl}W^>jzp9mpk90v4SpYp$1g#W z1`kMfiXvV;+Ki;mRW!C@4(?p>qrt-mQO~vx+_}+<*jDi*+_MYyJ>O1}D#CbGd;pJ< zUQ(z047^O-g$M~z629V#WWg!86Hhes!5Xyr-Js5PH&E&=N^*_yBy_e2Z`p3tMPn67 zg)~r){X0pjlQC+JY$YirP-pA8XqMcAbQcE_4%|U~_f4h2e(k8zdXM`;6G^h94|VMo zjtpf{R8Gud?0++fELS3_x@UMVs!Tn)+#(?+26T}cLQ;19kl*4=LN~^f_?SMZYg$gD zKH8Ir&~1|N8;(!z<|Ht-D=Mcrk!1fyyx%B?hR3r=c;QAg4w1wM&I=U!O~l9ZS4m{4 zDoN;xlW=7a3bgy7u4)I~%9o-pF%oaz9L3v+StLGcJTtxpG;&iE-WN@#fz$gSPo$pu zJvvK**PW=F%rev@3?(sS(qPf!co}jZtxp>9BIpU)rpn>X${TpqXAN}N`ntk0f<*&Y{8ExYSvdv13th>byi3?>g7xS^Gf}H|jzC?I%+g zp{0z?)Y7n?eNiSNKw{Ug;jVE99i7 zkFv9`aWiTGI)>GtPHP`-CeOkv!HX!^xB{=rqe*byKoZ!q2S(3KaqpZ3%4C5{ea!Hp zyboTd-of3_E_f~2htx`zkmQZ&Boz38`o_JZZjT?~1vF0GFzcoA-H{sa|F_P%sOoC%ykj%9S@VLE@I_({ZYNtBfy!7NvI|*J0q)wJm z$Xom#Z!<||NgSLIY5_syKO<^ zLrJ6`cOy}*2CClQLusikIA8& zAZLILe1_GdRymdWM@OOlqcQGA97A%8CqBt)Q@82aG$5f8xhdJyd&o!XnmU-e^)8^U zMS*zIw;o3q`q7XUTk5JVf`(-?skf8|o@Ro&&tuj(DijxA?8TEU+B7&l4%LeJXs}X6 z``U>lkXC>%0^SMV$DPK-c$qZ|(Y-aO+tjl(sGmOq zCE`&e;ZD76IB0A*#>Av$)OA@ObhLtcYTw7lV9+2}W$Jl*0Cjngj`WGt@s7I&G40Rm2A-f3i3F_sC7yHU z;P&ZlBs$9#m1ZLlchiV^EsP@p;SzkxpH5=QuGIbMb@*>ufwsw-sJu6eB(GSI@Z3pw zQO2Rpc^6PEDvy@Ed+|2u9^UTif_v;W$ea|wyw*tK>P&ptvL7YAE}~+l4+#c3ptQRl znwJUT^`r`PEFOjzkFHbqXf_%XCLuKbIf=4IB71EX9+eJ5okbZQn{CF&t)1~a6-ak} zN?o#p@It$YBt&cR(!PWQT4vxaXAz!?twZ~}9VC<{icgi}(Kz@xi3Z1_Dt8uQ*4g2i zXew@}&&0z=*~khuLbIbKQk}Ax@?r*3MXsaCml?NeHeTKrBLQ1w+{=k2p(&ndym1;$ zoC?$|evXRb5)s8eSw(b%TU6n@Lh;EOJZgQNFP^p1)K@ zo|X#NGNrp#|e0(!rnCqi4xd?4p*HHD8neUKpB=GDws_$7L^UNkZTYUlXdq&~q(k0A1 z*W&H;xg^%V9hDn$Ng#Rw?hJm5j@6&gZn^>$s@JL0nrnDDvk{F)HPQa0GhX!-CBd#A z(7a(D3M#Ln$@vmqmMtQoZj(?boQ%pD)9_YEiA2xUBcXg1YNhw1Qe`M=ALgO9oT)GO zisN-}Lo{tOMYdp9JWCsbLUtZXJL1tW%^208GN`stz&*N)28m3Z4e`hA`yBw(_ zMxu3UANck=j=X@k)K&L13R)dWR3U_^c}5{aNebz+wxVEI2=aqBqV;qHo;IJr!=M+) zQA;2R_i9+4d5TN%>rnq>D|KRVp*w3m&ZPH;Pr)5zR;p8v!UrTYb0dnP1|nzaF+5xF z1gBLmAiYO9+7{QMbYU#st0*8_G!Lit>*7Ik5$=1kaMt_`iXNW9nW{3E zZOETefL3K!WKXF=>8&slYo3k5N$c=(o-GpV&ogtbK%xgvlR$(Ao?JbK>XKo2zz)W> z!E2GTwiShf*HF)luU>v8PRwXVk;ysKY+r{DEq(Dotqix)fCqa`@oKgoB9&(%(quF4 zS;yjv31gdMJ8*uFHL@)4AYWuGs^{C|ezGsF>GecWTQbgmG)MSdrk?Uq!KKFoQJ<@c zGuJfm?20bh`!TiAwzGKO5{;WBQ{X2w3rSnEQRDsw4`$h;&2SLzcSs{`(K!;;x5R}r zD{%7dE}rAgIpQE&BncCCXZK)Ls}@ddz#{oz?l7(Bn?&DO>0o=_ziv+>Jxcj&q9d9xalW>Fcvru_*Bl5dm#~s~Jln3jhe2NcRCyv9t z%gSgu5lh`$q);D!A5Y&4FlaqH)bK z6h=Qrbj5bOxG@D4t-X;Ok$|GXozVnIlqIer!99~v;hKo7UGc~s-XD2IR>-%MK;@ip zRE;`~%pT8>d8!4~(wlJlgc+h)S;#%R2T9(|h~89+{Et`g;lvW08oUT8J7sXCAQA;9 z`{SMJK3p+dk55N{8s}8xFg{&y+!2+pSD~eR3{p~=d3tmP*~!4oK0AiMGhLnUgW8gZw81FpuF;+~W~E;V=|ee`Lh$15ZK-7sV?8iL@I0l4~U3JKlrK!wIU zJhwK$Q}HZZcdx>eh1p0-)k50qNaPgkz_XiUk#Ix`JBQdKp*u01vA8yD zDITV0b;D`xaygLmNWg)GFk^Eb_FQ$U5LW%}pzua(Rqozao^Iq~LBv2%6XJ!LwFTJX2nTwpZQoY$20pKW#(l z-a>rH8;tAoj^MEq8%aGIkl&z;l9efpPp!cV>_A*XPt^1rhnK>0P!nMXJXwydB;NwLT{hq%01PVjtw0G$W(g zACC)nqs(dvZ0)aahWvpXU<^KdGPZj|EwVo#JOWuVFR z3F3CIM%J`XNWZfcwJkQdaf7kxt%Fb|eHDdQ58*}MIOH<(Td_4C?a>}cH&}x5Nh!$i z%)^PmL&)$}N9vw163W|yo68^L+L`mXxcDya>mSE^m&16;)Ok(UpHY{X_0&m-=_@Pt 
zAijAE?qAxD{KRwc$#lhyE7Nhqa2sxz-^Q(r8&T~w4tKSfIDfeg?-PPhEPezJoX+6k z!_G*4B*)aHgz{W#yfE#LjuB6Bo}Ghd)>jZ!5`hv7M3zMcsv}EKd@~1+p2y(sn$>uk zu7Q$u&(I`Xht_MgsFWLkyurd~bz=Nyg*q|@JD_C#GNcEZp)HW`NH-6+=cnW8@?6}U zF$9-vd*Y4KPDJfEjFN~})Wt4Go^KkSI?JI+JPdc`KcGs^2puc*ks+CiXq}@die819 zXN+&oEF}?X38q&rAmLd@kvLKcb?j9rjo1XwwUww^`w*$a-lBAx8;U+@AYAYfF5fW1 z6$@EJ%-x6>`7B)a)I^@UFw*8OLDKwMG7xc z!u5$W@NCe2WQT<#c~C8qa+vsLatbvXBk{5Sdc<4%qcCqX$~1bSRJ0w*8;2m{{C!;R z>xYu!S2%X_37$1s;+Ag>5=K8n-o%xoq9|>WKv65>{~^zCYwQ%{ zJak6Y>F2oXXNCIMySUvp5|0C}GHX$UPq)f(ExIQPiYMcBTo7(|0^-IT!I?qDcz(11 z?&0Q4|5S~~^>Y!w`6SZo=ON$v5S|A<$F13>D6;p1f1WYQ)E}TxX$^8kJWy0kB+1x9 z{q()WDmg-8k0;^z#-r3{s4_mRTSx;62axEIBWRJ)9CDzU@CbMA=4!#Nk&dQNM`aiO&P5PLuAH1-6PYz zc|4Uy)KP3jEFDZWA^Tu6vRuB4)DGs6QrJF{y7iQvhnIQ6PFQe9AF+QdXQdU0)|m`yqe*(&85bRy!Ha8G&@Lv1f@B%I$Q8%? zdHr!lxI1;4-aukc#!#oc1ti6CB%y+GJQ}tKA4~kHo7zqiPBbIYov-oc)MK>wJ%^Sh zfp|5k7!Ad$)QySTuXc38r%U}w$RPlgk*`tQ*$=OR%1ENU9PLAl@$TMZv}j&J%apk! zxN|&S*(Or&*GH)HdK(g&T!|-a8N4Z&Ou{Xr@M>}!b>np4Nze5p8nT{*r00{w+0AHp zCxeC-BfQhBrd~rDX=Jxo)NRUX>K$G|f=cn!H(@sQ60yR^_?I-M#EOOnWDxYbkoJmZ zGFUc-Oe%!Q&{3I;_URIPfh3LVTuNde&QaHQ*(8~BnuND+LR&*58Vs4&Uc994Zlg%x z`F`qGtw((qZKO_NW;A%-I_f>47v5TAqgJ*rNl1^t{jjk#cxpO{C4hP~36r?_Q4*f| z7WHNQaa&mkZwDPk#cQCfXf^f7e@?=5o5YSvy_LB|a?eKu6pVv;kTkC{TF^s9{`yWE$iz?*?f5Z$VWTB9e(Vfwz?T?dl0)bRjLWgbNN8!ltpT1dFgM%iW`WiGC$)kKeQ`49@ zAvA0sG7Nn2cGxcJ{Bjr`CJ!T_PuX}ebQhY}@4!3VR5Zt@Ah%Tn*&C~nz4szA2lPiV z`yLveX5)FeDcTCEQ84#4KCIu5yVgfgttN@Q=EWqjhRN@T)}VTlDn6~7ji#2f)VX2; z4HUAYUh`j3-)p(lLtBG-da+4xm<-BO&!MhII^NpO#k0%9(YWCi5*IV;pWA~Zra0l@ zeFr>uT8LLky-~f+5$&A~Q0*#%97!RxZ;L?b6-PA7?ndP)PZVk{$D?2wWH0N51_x^r zux4sdZ70;fy?}y#t|)tV9(Q}~U~2Tkc+kBwiUqEr1?qV8Vjix~ut(N5U8GisBj&{{ z)CmXRT~sIJuVtggO&Sk{2conv5jAJxNPtz1a#hrm3P-<^b9U z<{(EW1nE*~xU*jtFS4?cejorBy7oa5hp88|%W)^K9XTR*5Pxh7p0pX`(X#C*KX3>I z_5rBgqk+1_btvOzpz7`fqy-(t>GN|@ogj(3**VBJHbj;Z)7S3fAiDV;8W%U<_Lx+Z z^}C7N8WWK>FA&!*N#agp3|=-6D!D1B(zn4Y`y)7L9Elp=0VKR(5%ua*M^djnXhPs@ zk}XOmqq=zF8Vb=?_qk*gvWtvP9HjKo=P0PjfPZJ;Kzlz@E;@`V`B~IufDldJ`+`=d zUZ6QeZ^%L~mNZXaCzI=nB);|`v0H0M!TKpl9jc+B#zG`Ax10pKY#_mmRQ~+|$$R_I zwsb7-{s5{<))On-p8C615?U6cYH$PfZj-@#-C`Oty$3$59!DZx?$l+F8{U^cqh9Yv zk?@oz>b+NyIyp&^wBVYS59-D~#JiQTG*G>m`Y4Oj z=v(_}pa2m|fXT@nTWMa&GIF}Sk>>R&rkV8*$Yk1i8Z~|)4ZggSl$mjK*3~3QXL}l@ zF_s2oR@0cR>uJ(`aaxdgpRChm$oixN8Qi%+YMC!+WOo)xwY5; zGTtmajW7tMF);?;&$yj~!LH=@(2$CJCLy<$ny$F)d2 z%$f$MjG(S(S=8HiId$KfgZK3bc(dG{h7Z_7+PFod72c6_b|}%OP9!$02laP0B{_lX zq$Yool&%~iIZF*1S{Xvqs!K@m!D|{`A4oHiBy%(khC3#`aUb7 zfm`aSU%)QvwKJTCMETN?nfpk@`V7&~=cID9gvP#{Oe#vHq%0{;UDvW{_}Jq#xYq$1 zKR%6EJqD1%&1*E=NSY?Cn@0*dvuNz3J~U2ImK0T7NOAQ#l4ZZ4kyBDh_JI)1ux}=X zE?i=<)JSE4Gf{;r$()uZl@W%-PHiWhIC0V<7cvy9r^$i+$hfv2&A>e}SviTg+OEV| z8cH+gKBvXEx01=8Y+COZNvj1U$#&Fe;{~#4lRe4Z-Gr9x>_Ik@`qQrbP2^l8LhH?U z(az_uXs>xZ9eR9@_BY<5{RLadd~^%x51T<-C-0N#Y*;W$l^?4X4wMv>8;!?eP( zg!FZ%lUY(LEgC{(#;GS|&M?y3XhIrGuhQZUaWZRbBZI4-$U>@~nR`W=tk;#a3vSa) znT@2qN0cUx@+NtM5Ry4nLGyKONzLy*DV?t%*$-<-$>s&AY#KoFHOojj#GgjyCeheK z7iiSud>Syak?7rQ8Y$gIV`iNuxkP!AIb=>l1Vsql3yH=aquwq@Nrg=$cDaSd>3pKm z^Tnu#?m(I_eT(MH}woFq5c~6#A@Y|@Sr%7V)T=IQB8ub zInW)-FF$x8@wFxOwL4G3FUqOQh<57xj_Dyrnxn4wdlFjTha@ES z^Ue&A9Wel$BK_HrTeGs5uLy&jJ=#Zd9y88yp{aeuZG35`91($E?dJ+~o| zo+Y^7w<}sV_d!VpQ-ieHBYV>c)TO*a)BfGaSaJ+^lSbmnbbrKts6|>*4_xLB!1cu^ z@VKB5sR2{)$VCVjCU!!>`AEDBdWEVzxAA2CZ8XI{MCKD=)QAqk{iv>Z)o&V#lb9YL zZ3W_nXyD47Vmx{;i?S1%c$Rw>sg=nj#x2D)VK#EjoRF3=5YZ`bkSS$_d)=M!#;Gfc z^&ODk)dCp-Q_++*48@DqB712sv=5ty+H;Fhq3Mgqww+Ov_#7F#XOm!$EoeR1Na7bQ zP+X&gcU?E5E%GDkCEJm7s}ZHU2ziAz{QCnN+~4DVI>S3N8rd90Tz3q>soe_5eb$1@ zlR6_~;zwjA_ry7gA-FY=sa+13AY)MT$QZ7f_lmw$%sOu(EyuBVpV|`-De86?m%u*pEs;1Ec 
zHL07eHyY-!aY<4eSKiG*)}373SalIO$>Wfrl7p~Qt8q)y8s~M7;c3HmT#T^7Ro^#A zZr~!oKMH5Iu0h@{Jw#qR%hb?=QEDQKB=v=eaS%YFy(;_{C898+4{7E(0!A%E07+|W_L1-}*W zTd9KB=pl&hBZaI9t5E;Z1j&Q9B28-o&JTUYa3kQaQG?qPZXjuDGA`W8K~PXGE?ZjRuPdVY9 zcOEi?V{neU0+%QQMXe_hu`?ABT5$+YSH=c17d=L_@ z?858uH*j^TH?~_XM$r7t@H=}Ff&1NXPP82-TNM$xeemS$L}dC1Bi?O4idbeyD+tB8kA?_K%0SMii%2MXinE>} zs1i&^*mI_K9={bO!DEo@w-;wiGLbz)8>#U|aF2R|2(>Y|v}Opra_%vHU4-nQJnWKm zM$pJaL<>De;Q2lXtUZE=svYn$K8&!;E(kq+3n%JRka$oSA@5cob*C3XhYZBY!%euN zk%v=f(vh`$E>7eh#O@dytT;0h2d1hb{nRc*-?KyJ&^tKgmxZXio;caU*zufZ1YYWo zv%55Kc0>Y_b6z3b@(~hqy2HDWlUfXREb)^*-k4;9D!aVptehmM~@rW5?iNrPHh_AhYD{}`Sf*p*+(+RlL zCkdWivT%jz0dIs{V*E7=2Ukfkb~6A0#TAI?5s$MP#_%!XBG}>?t__JtP;_r(rS-vy z^=a7P`yLlf%W!eDG|mX$!Wr26i!SS&4|a>y0ywV;H~L1wTtu_(}D_=~L;ru3LoD z9qVy zQj9|p{LTt-&aQ|by9;-NI*@im7dg5kQNH~Fil^;k#<&5&s){)4@(2f5+i>&sM_erm zL8(GCF7{AC`f^d^tGvM@@%@NY9e}`YpK$f<5#)wVV5SzChSLemT>qAe*SCnyi?|THrJLBw-Wr%z}6}JaBA=R@N!lF`eI_xsS zQ<*x#vkk`$ryzQlGPYY^L~yVjg4^FAH1H`d4{O5F)&mF@j6;mP9s-tX(TSzR&Q z9A1g@Vx5q9M+#@eYO#L_ldCkw;@H6R@E@}Umkf0fK5Hl<6fVH8cqUF52_bPpG|qXQ zL89A4oM>nK-CzB{2u9~3Xdl*5PDPAV{wjo>CIn zy-)%nhi2epH(5mKX~0i;9U}X7LDZv2_!Y$=((gD99bn-|l?MFXIXG8073W*b5NF(u zxYfys9bAa0DNm8`!WCymB*8na2f_uUadNQ`{FX5}X0tjX8iyl(`yxaIsKKkS0HI<_ z5Ma9w;ptfj=>o(}Ge*>m<+$vw2A@mmxHPs7hv$mIUvLYK&&|RyWf3NKo8jE$dvM>~ zgb35lxWwrUkA)V9IDH)fFQniVHW+~eC&PbGBD^QFap1B&A_eTRzh){fj2VW5$&2B? ze<%Xnsu28f4}v*i%zWrz=KwBzj^2iM!$No;U4@AFmk3Z_gK(2P1b=vo(=VT4$H!i9 zPwR%`yKf( zS{saz51rwj(H-tiQ{WSK413~P@X&mUJ?gITKj{IlGItz^5k#o7FT7{3#=h)ugw4Bw z$N>hp93O>aOUmK1q8s-25JU39u83*oAoPM80&AATbJ$xPUNjZEX4%4B065^W5PpY8 z!Dr}JxVeCwwKu}^V+`Km9^rQheb!9xf z)cPTG=5)9*b-`Av8}Lwa#3_-`wDzh&w-Uf7W3hEaW{^M z<|1&M1a_oJ!sEI)_Kqro?<#-z_PPT*WoN`RZbtC!lUS=Ej8OCA;K~Xhq&yC8HBNBb z5`lo+w{VycQ?Nt0DyRaHy&TVKwr&P|xK5at-)0 zdiyoa!R`_>1YcZ%BSw$moii3EQdYv(IvB@mhr(Ou0Rja!~LsPE)M!O!b?FGiHomdvwaEz--lyItSW-F1(~`d0PdaMBFb$zyys2C z{@#<}+x!$>>PGMumxNnIcSJGy(R0g3c+}}(pM(s26;E^@H|$Sg(=6?m{i>JuQd5k9;uyY&tx{{ej{?> z_2@mkw_d`L-pg>(=^)$}tbyl&o$!;+ z`r}aQVw`X>WbzNwkJ|^M zwVxqe$C9b-KEP)~FZdMCN7(Qw2x&EhS7{3lK3s!+WmfPXYk{4Dt_Ts1M3_HQo0??7 zqvQ%UcbS00qZHwz;ec?~aU8oh9DxPh5vJ0FBdgZIM|nN`+}h!9(H;9%N8_Z_MeNe{ zLGacoOiWz`@8EQ%X7$GI%S_(OmhIgB_H9_8C|ytAz-IIHajpjJ|LB` zg=)C36vl>(WjJK31>e0nI51=m_O2a(lhN`Bn70w0n?2!Pp$VUsF*vre2`9RhV9%CM zaN%Y!xzG!4p8F8Gm5JA*%@GvYfCz6VY!OVv?v0rU(w>7uN#*c+8w@W2zYRe0uaodP zY>#cO8H|0);ApV|wsolDs8klh1k4c`X^ZU(oe(I$2*+MIAW*3ve3m-lsAq5N(LRsE z4FM*h=PLFdPJ>?Kv=@Qh9&H)EGo#N0)k4d9m+7fTQi2gnY`A#lbKM5K~4W@bFX$Y%$8m>_cQChQD3i|t+u$o_gB z*;{rY@?R;~JZ=(FT>8Vs#}`qTn-Ir(jQHY_2lhXJC8mLt~N3J31{ zAjnsUnEq}^scy#ZrYJ~SDk0gC1L-G5$lP=eDXenr88j17rfkG`UqYk@9~np4@OSwJ z**69t^KU(nI==z&zttdV-!MeYiGp0pLb%)yg%j%#{yF*lWnUn&jGP0$;YbR|Cb`@d z6dFxNOz}UF=J<^4-?!k_{0;_+>HtmI|?9)C>5W9g4jL z97vsyAZS)IBtNCVZ}eECyU##O-%x~~IRs=9>hNXkmNlABpZB4e3L!u>q!Xd`yHYsmk~YF9&x^X5H`ISv3(=3$uJMeCLfS< zwgh2QED*+aLDYjSWRCAeq-Aexo#2bu)0yNR9YKJz9rphD5P{QVh_0H7y&w5P_H`df zdXP03;R4ysImjTjmt>$5B)vTlKiUF;i;57lHy6RfNuE*rF8r@}Ba8Av(1=!qZb7~f@2JWZ&4S7HrW!O^e0aEJoCUeT_ z_xsdhdq3){*m5d1V+(c6Z5(y~!XL)HFO4+r_vLiz=5uqZ?)pk9`Y1(tJ+z}{&y1$V z{aQ$k`q`7}-pvNry(i&bbq;iu4g zXl04;P&^hV=8_t)-!3$epAv^9DA^W)8k;ppBdOQj&!MDAhN6HxQeR~t|D*k= z-bv=B^+g;L_d@!;LQ)HOlbZP=7iIKXj8>zROP~39@4kkLHeC9>e zKK>as{re$N)))J0Oi;zkCiT;8Qmd0QIiH8pyJTIQY(&|P0OY0bKwV%xir!t0+^Y?! 
zOjv+y*546*)Dk6S;i!Jp9S5eQ;ef~x1#7ONuw*@|Qimfqs}c>Y6DU2i6*Z2bhzr?> z8izqBtr&{^|DJ;ShuO%g-G@WBqfkkuqt4$51%u_tEB^uMzsyI?kh!=-YK}v7&1kfZ zM8$(`Xc~ABjpo}>J9;~AoLqum8pH5MnGMx{ggZ5&eg!o=)qonFc$OOU(-NwiQ7f9) zTj0B;6*yY>1u9aTaWw5ZsYAk%H#Hp%9)4(W<)VB*B^v)AF&5h3@bKL@W9yDHvwpz= zPaz7*Em0FX2l*>T;MAim)NH?o%I#0lFnR(Cf8!&6S{`JByii0LAZc?HO7E=1QB)v* z>{1jjyn?bsALM^nhKyw=alnwAw~zay;r@#L@=3Gne1w0Hv{pvb|Uim1tcz>jmQUOkW~ke+`bGEYXp!L#Uar= z8%~Y8$hkQJc{T!s)!iU_mmHDD8xdKz53;lIWUo#{$oRcTS#cFn|K5i@)4`C9_eJul zs|YlG4-r|bAh|_y7H<|39Jol6bR#uQ6J&i_kZB==n+__3VitM^amujlw=>W2D|+iK2zUh?}z#Nta0tF|hze4xUKdu^bh> z_CRhv44FowQ8a!q@^AIQVRsu`ICUDQ9ojNzZCVe$D*i?)bKxCL+VOnls5;U z;E5C&*B>G6%mm~Q9EBqj??JwRtc4ezsc3Xagcd|?QA%yML~S%}$f2|p>R z^@Z;sYuN^bPw5Yt=_Mq42VwWatB?-*6Vjap$ZZ;rZ>EMKV)Pz_gkD2Ji8=5FQKgKqzQIKZYA>={}0w;a||8M&v za4V^q-p?iH9UH-oq!zw82(lhe5PSY>NTmA^+A1XVVHGyC7$Mr?FgXu15VEWkk(2Kr zBIprz?J`8plnUhDy^NHgPY`@@4kCOFA^mb9A{>9j!P|F`Ryq$UV?HJM_Za5?2T}}O zkn))rC-)TKxp_F%wBDl(>nw%F%w{yO4!tKl(s1O&2G{QBGb z`OX#74boSO-P`CN9gTNJNB-#k&+|OoHnYSY&P*fxbsf24%wf8R?Ig2J8#6-#+y~4p0(now(+8k*ST(Tc2l-7Q~B(& zKL@(yr^4Sf4&Q`rre%B%mQ&~1sGrY@``cB2w0<0=ehRd;minpq*stlQ=qImYYA{zi zhw^6hQ^aS*=9|#XEVb9~E$f#f(ojD|KX?_&TRZ(;^1zk)xq7Vfbf$5p#v1u!^yBJS zKdyTHcpC7)8U1*Q^`S(UYsrsS@z0IvR*Q zrs?|Sx9MlGNC)~cymy6u%I@Xb>BrZxeth-%2?QGIXU4>*Lepy%UmM@vSWTzX*E>;n zMngtFGaa8FW^d`Yt^p66(NCa=&uS_&cJ1b$(h2d`uCr^`wPWI=^%Lk=KY?2P*c?sQ zuaJpvy0wkXRFX<*6`#rp%}aQB@_ISDxHC|;RbrBhlP%H1g=sL~2@x6n6!BTJ88V*^ z^b@MrkEiMRD^l{Y?dXwf7oW(-xN7y|X*~aUbbPiJ_V$j`thLKuR8Hvn zO+j0CQbKCIyjZJUm9)#Qu3w&R&kx4m8^+;ly#Mj(_!g6Cqjh{LC-nD2tC`f)B4d5wT(~Zg!p@v>Y)Zg*M;w26!BT` z6ffyOKQp!Z2{m570_OfIIXl|re@Z9B-%H8@a|qL42`j#ldC3pybp57`Pj~ZApk6(U7ZGDHYm9MCUcs*k$R!#9$bKy})Ao;f zCJMwKm(OLJa%s=xJ<2+w?HZ5GHshL^368bBKW literal 0 HcmV?d00001 From 6702918abddafb4bd66965bc3e133fdb4be133c1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 8 Feb 2024 14:02:08 -0500 Subject: [PATCH 0646/1112] #10271 fix tool tests w/arrays --- .../edu/harvard/iq/dataverse/api/TestApi.java | 28 ++++++++++++++++ .../iq/dataverse/api/ExternalToolsIT.java | 33 +++++++++---------- .../edu/harvard/iq/dataverse/api/UtilIT.java | 15 +++++++++ 3 files changed, 59 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java index 10510013495..b9db44b2671 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java @@ -44,6 +44,34 @@ public Response getExternalToolsforFile(@PathParam("id") String idSupplied, @Que return wr.getResponse(); } } + + @GET + @Path("datasets/{id}/externalTool/{toolId}") + public Response getExternalToolforDatasetById(@PathParam("id") String idSupplied, @PathParam("toolId") String toolId, @QueryParam("type") String typeSupplied) { + ExternalTool.Type type; + try { + type = ExternalTool.Type.fromString(typeSupplied); + } catch (IllegalArgumentException ex) { + return error(BAD_REQUEST, ex.getLocalizedMessage()); + } + Dataset dataset; + try { + dataset = findDatasetOrDie(idSupplied); + JsonArrayBuilder tools = Json.createArrayBuilder(); + List datasetTools = externalToolService.findDatasetToolsByType(type); + for (ExternalTool tool : datasetTools) { + ApiToken apiToken = externalToolService.getApiToken(getRequestApiKey()); + ExternalToolHandler externalToolHandler = new ExternalToolHandler(tool, dataset, apiToken, null); + JsonObjectBuilder toolToJson = externalToolService.getToolAsJsonWithQueryParameters(externalToolHandler); + if 
(tool.getId().toString().equals(toolId)) { + return ok(toolToJson); + } + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + return error(BAD_REQUEST, "Could not find external tool with id of " + toolId); + } @Path("files/{id}/externalTools") @GET diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 9a280f475a1..22abf6fa2e3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -197,7 +197,7 @@ public void testDatasetLevelTool1() { .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("DatasetTool1")); - long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + Long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); Response getExternalToolsByDatasetIdInvalidType = UtilIT.getExternalToolsForDataset(datasetId.toString(), "invalidType", apiToken); getExternalToolsByDatasetIdInvalidType.prettyPrint(); @@ -205,12 +205,12 @@ public void testDatasetLevelTool1() { .statusCode(BAD_REQUEST.getStatusCode()) .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview, query].")); - Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "explore", apiToken); + Response getExternalToolsByDatasetId = UtilIT.getExternalToolForDatasetById(datasetId.toString(), "explore", apiToken, toolId.toString()); getExternalToolsByDatasetId.prettyPrint(); getExternalToolsByDatasetId.then().assertThat() - .body("data[0].displayName", CoreMatchers.equalTo("DatasetTool1")) - .body("data[0].scope", CoreMatchers.equalTo("dataset")) - .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("http://datasettool1.com?datasetPid=" + datasetPid + "&key=" + apiToken)) + .body("data.displayName", CoreMatchers.equalTo("DatasetTool1")) + .body("data.scope", CoreMatchers.equalTo("dataset")) + .body("data.toolUrlWithQueryParams", CoreMatchers.equalTo("http://datasettool1.com?datasetPid=" + datasetPid + "&key=" + apiToken)) .statusCode(OK.getStatusCode()); //Delete the tool added by this test... 
@@ -271,15 +271,14 @@ public void testDatasetLevelToolConfigure() { .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")); - long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); - - Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "configure", apiToken); + Long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + Response getExternalToolsByDatasetId = UtilIT.getExternalToolForDatasetById(datasetId.toString(), "configure", apiToken, toolId.toString()); getExternalToolsByDatasetId.prettyPrint(); getExternalToolsByDatasetId.then().assertThat() - .body("data[0].displayName", CoreMatchers.equalTo("Dataset Configurator")) - .body("data[0].scope", CoreMatchers.equalTo("dataset")) - .body("data[0].types[0]", CoreMatchers.equalTo("configure")) - .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid)) + .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")) + .body("data.scope", CoreMatchers.equalTo("dataset")) + .body("data.types[0]", CoreMatchers.equalTo("configure")) + .body("data.toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid)) .statusCode(OK.getStatusCode()); //Delete the tool added by this test... @@ -594,7 +593,7 @@ public void testFileLevelToolWithAuxFileReq() throws IOException { .statusCode(OK.getStatusCode()) .body("data.displayName", CoreMatchers.equalTo("HDF5 Tool")); - long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + Long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); Response getTool = UtilIT.getExternalTool(toolId); getTool.prettyPrint(); @@ -610,13 +609,13 @@ public void testFileLevelToolWithAuxFileReq() throws IOException { .body("data", Matchers.hasSize(0)); // The tool shows for a true HDF5 file. The NcML aux file is available. Requirements met. - Response getToolsForTrueHdf5 = UtilIT.getExternalToolsForFile(trueHdf5.toString(), "preview", apiToken); + Response getToolsForTrueHdf5 = UtilIT.getExternalToolForFileById(trueHdf5.toString(), "preview", apiToken, toolId.toString()); getToolsForTrueHdf5.prettyPrint(); getToolsForTrueHdf5.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data[0].displayName", CoreMatchers.equalTo("HDF5 Tool")) - .body("data[0].scope", CoreMatchers.equalTo("file")) - .body("data[0].contentType", CoreMatchers.equalTo("application/x-hdf5")); + .body("data.displayName", CoreMatchers.equalTo("HDF5 Tool")) + .body("data.scope", CoreMatchers.equalTo("file")) + .body("data.contentType", CoreMatchers.equalTo("application/x-hdf5")); //Delete the tool added by this test... 
Response deleteExternalTool = UtilIT.deleteExternalTool(toolId); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ec41248a65f..d67b45b645b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2339,6 +2339,21 @@ static Response getExternalToolsForDataset(String idOrPersistentIdOfDataset, Str } return requestSpecification.get("/api/admin/test/datasets/" + idInPath + "/externalTools?type=" + type + optionalQueryParam); } + + static Response getExternalToolForDatasetById(String idOrPersistentIdOfDataset, String type, String apiToken, String toolId) { + String idInPath = idOrPersistentIdOfDataset; // Assume it's a number. + String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. + if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) { + idInPath = ":persistentId"; + optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset; + } + RequestSpecification requestSpecification = given(); + if (apiToken != null) { + requestSpecification = given() + .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/admin/test/datasets/" + idInPath + "/externalTool/" + toolId + "?type=" + type + optionalQueryParam); + } static Response getExternalToolsForFile(String idOrPersistentIdOfFile, String type, String apiToken) { String idInPath = idOrPersistentIdOfFile; // Assume it's a number. From 889e942f353953aff9df192c46fa2c0ebd4b0c51 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 8 Feb 2024 14:32:44 -0500 Subject: [PATCH 0647/1112] #10286 update pathparam name/terms --- .../harvard/iq/dataverse/api/Datasets.java | 6 ++-- .../iq/dataverse/util/json/JsonPrinter.java | 32 +++++++++---------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 60c07815b71..02eb13e32d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -186,12 +186,12 @@ public interface DsVersionHandler { @GET @AuthRequired @Path("{id}") - public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("breadcrumbs") Boolean breadcrumbs) { + public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) { return response( req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved)); - Boolean includeBreadcrumbs = breadcrumbs == null ? false : breadcrumbs; - final JsonObjectBuilder jsonbuilder = json(retrieved, includeBreadcrumbs); + Boolean includeOwners = returnOwners == null ? 
false : returnOwners; + final JsonObjectBuilder jsonbuilder = json(retrieved, includeOwners); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) if((latest != null) && latest.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 0803001fbfd..7d9bede9a61 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -305,43 +305,43 @@ public static JsonArrayBuilder json(List dataverseContacts) { return jsonArrayOfContacts; } - public static JsonArrayBuilder getBreadcrumbsFromDvObject(DvObject dvObject) { + public static JsonArrayBuilder getOwnersFromDvObject(DvObject dvObject) { List ownerList = new ArrayList(); - + dvObject = dvObject.getOwner(); // We're going to ignore the object itself while (dvObject != null) { ownerList.add(dvObject); dvObject = dvObject.getOwner(); } - JsonArrayBuilder jsonArrayOfBreadcrumbs = Json.createArrayBuilder(); + JsonArrayBuilder jsonArrayOfOwners = Json.createArrayBuilder(); for (DvObject dvo : ownerList){ - JsonObjectBuilder breadcrumbObject = jsonObjectBuilder(); + JsonObjectBuilder ownerObject = jsonObjectBuilder(); if (dvo.isInstanceofDataverse()){ - breadcrumbObject.add("type", "DATAVERSE"); + ownerObject.add("type", "DATAVERSE"); } if (dvo.isInstanceofDataset()){ - breadcrumbObject.add("type", "DATASET"); + ownerObject.add("type", "DATASET"); } if (dvo.isInstanceofDataFile()){ - breadcrumbObject.add("type", "DATAFILE"); + ownerObject.add("type", "DATAFILE"); } if (dvo.isInstanceofDataverse()){ Dataverse in = (Dataverse) dvo; - breadcrumbObject.add("identifier", in.getAlias()); + ownerObject.add("identifier", in.getAlias()); } if (dvo.isInstanceofDataset() || dvo.isInstanceofDataFile() ){ if (dvo.getIdentifier() != null){ - breadcrumbObject.add("identifier", dvo.getIdentifier()); + ownerObject.add("identifier", dvo.getIdentifier()); } else { - breadcrumbObject.add("identifier", dvo.getId()); + ownerObject.add("identifier", dvo.getId()); } } - breadcrumbObject.add("displayName", dvo.getDisplayName()); - jsonArrayOfBreadcrumbs.add(breadcrumbObject); + ownerObject.add("displayName", dvo.getDisplayName()); + jsonArrayOfOwners.add(ownerObject); } - return jsonArrayOfBreadcrumbs; + return jsonArrayOfOwners; } public static JsonObjectBuilder json( DataverseTheme theme ) { @@ -371,7 +371,7 @@ public static JsonObjectBuilder json(Dataset ds){ return json(ds, false); } - public static JsonObjectBuilder json(Dataset ds, Boolean includeBreadcrumbs) { + public static JsonObjectBuilder json(Dataset ds, Boolean includeOwners) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", ds.getId()) .add("identifier", ds.getIdentifier()) @@ -384,8 +384,8 @@ public static JsonObjectBuilder json(Dataset ds, Boolean includeBreadcrumbs) { if (DvObjectContainer.isMetadataLanguageSet(ds.getMetadataLanguage())) { bld.add("metadataLanguage", ds.getMetadataLanguage()); } - if (includeBreadcrumbs){ - bld.add("ownerArray", getBreadcrumbsFromDvObject(ds)); + if (includeOwners){ + bld.add("ownerArray", getOwnersFromDvObject(ds)); } return bld; } From 244cb1a7a3ed87cc747ede3bd7da967e6f5e2938 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 9 
Feb 2024 12:24:58 -0500 Subject: [PATCH 0648/1112] support citation for files with PIDs #10240 --- .../edu/harvard/iq/dataverse/api/Files.java | 2 +- .../iq/dataverse/DataCitationTest.java | 31 +++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 69bdebb2dd5..440577d1518 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -958,7 +958,7 @@ public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @ if (fm == null) { return notFound(BundleUtil.getStringFromBundle("files.api.fileNotFound")); } - boolean direct = false; + boolean direct = df.isIdentifierRegistered(); DataCitation citation = new DataCitation(fm, direct); return ok(citation.toString(true)); } catch (WrappedResponse ex) { diff --git a/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java b/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java index 4097adb0be6..23a7efedca7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java @@ -378,6 +378,36 @@ public void testTitleWithQuotes() throws ParseException { } + @Test + public void testFileCitationToStringHtml() throws ParseException { + DatasetVersion dsv = createATestDatasetVersion("Dataset Title", true); + FileMetadata fileMetadata = new FileMetadata(); + fileMetadata.setLabel("foo.txt"); + fileMetadata.setDataFile(new DataFile()); + dsv.setVersionState(DatasetVersion.VersionState.RELEASED); + fileMetadata.setDatasetVersion(dsv); + dsv.setDataset(dsv.getDataset()); + DataCitation fileCitation = new DataCitation(fileMetadata, false); + assertEquals("First Last, 1955, \"Dataset Title\", https://doi.org/10.5072/FK2/LK0D1H, LibraScholar, V1; foo.txt [fileName]", fileCitation.toString(true)); + } + + @Test + public void testFileCitationToStringHtmlFilePid() throws ParseException { + DatasetVersion dsv = createATestDatasetVersion("Dataset Title", true); + FileMetadata fileMetadata = new FileMetadata(); + fileMetadata.setLabel("foo.txt"); + DataFile dataFile = new DataFile(); + dataFile.setProtocol("doi"); + dataFile.setAuthority("10.42"); + dataFile.setIdentifier("myFilePid"); + fileMetadata.setDataFile(dataFile); + dsv.setVersionState(DatasetVersion.VersionState.RELEASED); + fileMetadata.setDatasetVersion(dsv); + dsv.setDataset(dsv.getDataset()); + DataCitation fileCitation = new DataCitation(fileMetadata, true); + assertEquals("First Last, 1955, \"foo.txt\", Dataset Title, https://doi.org/10.42/myFilePid, LibraScholar, V1", fileCitation.toString(true)); + } + private DatasetVersion createATestDatasetVersion(String withTitle, boolean withAuthor) throws ParseException { Dataverse dataverse = new Dataverse(); @@ -400,6 +430,7 @@ private DatasetVersion createATestDatasetVersion(String withTitle, boolean withA fields.add(createTitleField(withTitle)); } if (withAuthor) { + // TODO: "Last, First" would make more sense. fields.add(createAuthorField("First Last")); } From c95ceb282648d7c386e8dce88aec416a872af567 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 9 Feb 2024 18:42:10 +0100 Subject: [PATCH 0649/1112] fix(ct): make base image comply with OpenShift file permissions To enable the user with a random, arbitrary UID to write into the overlay filesystem, we need to set proper file permissions. 
This should not affect users on Docker or other K8s distributions, as the security is more lenient there. It is not ideal to write into overlayfs, as it impacts performance and may lead to unintended side effects. This is a workaround to at least get going. See https://docs.openshift.com/container-platform/4.14/openshift_images/create-images.html#use-uid_create-images for a detailed reference --- modules/container-base/src/main/docker/Dockerfile | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 97aa4cd2792..3d2e1f782f2 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -84,8 +84,11 @@ RUN < Date: Fri, 9 Feb 2024 18:43:58 +0100 Subject: [PATCH 0650/1112] fix(ct): make location of boot scripts configurable By defining pre- and postboot file locations within the Dockerfile, it wasn't able to change the location by changing CONFIG_DIR env var. This is fixed now, allowing simpler backing of the dir location with an (ephemeral) volume. --- modules/container-base/src/main/docker/Dockerfile | 2 -- .../container-base/src/main/docker/scripts/entrypoint.sh | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 3d2e1f782f2..663b3d9dd51 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -49,8 +49,6 @@ ENV PAYARA_DIR="${HOME_DIR}/appserver" \ ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \ DOMAIN_DIR="${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}" \ DEPLOY_PROPS="" \ - PREBOOT_COMMANDS="${CONFIG_DIR}/pre-boot-commands.asadmin" \ - POSTBOOT_COMMANDS="${CONFIG_DIR}/post-boot-commands.asadmin" \ JVM_ARGS="" \ MEM_MAX_RAM_PERCENTAGE="70.0" \ MEM_XSS="512k" \ diff --git a/modules/container-base/src/main/docker/scripts/entrypoint.sh b/modules/container-base/src/main/docker/scripts/entrypoint.sh index 47933bd42e2..bd7031db9f0 100644 --- a/modules/container-base/src/main/docker/scripts/entrypoint.sh +++ b/modules/container-base/src/main/docker/scripts/entrypoint.sh @@ -10,6 +10,12 @@ # and zombies under control. If the ENTRYPOINT command is changed, it will still use dumb-init because shebang. # dumb-init takes care to send any signals to subshells, too! (Which might run in the background...) +# We do not define these variables within our Dockerfile so the location can be changed when trying to avoid +# writes to the overlay filesystem. (CONFIG_DIR is defined within the Dockerfile, but might be overridden.) 
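# As a usage sketch (image tag and paths here are illustrative, not part of this patch): backing the
# config directory with an ephemeral mount keeps the generated boot command files off the overlay
# filesystem, mirroring the emptyDir mounted at /opt/payara/config in the Kubernetes manifests added
# later in this series, and both locations can still be pointed elsewhere explicitly at runtime:
#   docker run -d --tmpfs /opt/payara/config gdcc/base:unstable
#   docker run -d -e PREBOOT_COMMANDS=/tmp/pre-boot-commands.asadmin -e POSTBOOT_COMMANDS=/tmp/post-boot-commands.asadmin gdcc/base:unstable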
+${PREBOOT_COMMANDS:="${CONFIG_DIR}/pre-boot-commands.asadmin"} +export PREBOOT_COMMANDS +${POSTBOOT_COMMANDS:="${CONFIG_DIR}/post-boot-commands.asadmin"} +export POSTBOOT_COMMANDS # Execute any scripts BEFORE the appserver starts for f in "${SCRIPT_DIR}"/init_* "${SCRIPT_DIR}"/init.d/*; do From d77cf4a9b2d6d07b66414704c8d389ed3fd40257 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 9 Feb 2024 18:44:49 +0100 Subject: [PATCH 0651/1112] style(ct): fix typos in base image Dockerfile --- modules/container-base/src/main/docker/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 663b3d9dd51..5fbbdd0c1e5 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -155,7 +155,7 @@ RUN < Date: Fri, 9 Feb 2024 18:45:35 +0100 Subject: [PATCH 0652/1112] fix(ct): make DV preboot file end up in config dir The location where to create the temporary file was wrong, fixed now. --- .../src/main/docker/scripts/init_1_generate_devmode_commands.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh index bb0984332f7..28e7fd68b97 100644 --- a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh +++ b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh @@ -16,7 +16,7 @@ ENABLE_JMX=${ENABLE_JMX:-0} ENABLE_JDWP=${ENABLE_JDWP:-0} ENABLE_RELOAD=${ENABLE_RELOAD:-0} -DV_PREBOOT=${PAYARA_DIR}/dataverse_preboot +DV_PREBOOT=${CONFIG_DIR}/dataverse_preboot echo "# Dataverse preboot configuration for Payara" > "${DV_PREBOOT}" # 1. 
Configure JMX (enabled by default on port 8686, but requires SSL) From 8547dbf4222597dc48c83f45104bb9165dcce843 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 9 Feb 2024 18:51:48 +0100 Subject: [PATCH 0653/1112] feat(k8s): initial commit of new module --- modules/container-k8s/pom.xml | 59 +++++++++++++++ .../src/main/jkube/dataverse-datasets-pvc.yml | 6 ++ .../src/main/jkube/dataverse-deployment.yaml | 72 +++++++++++++++++++ .../src/main/jkube/dataverse-docroot-pvc.yml | 6 ++ .../src/main/jkube/dataverse-storage-pvc.yml | 6 ++ .../src/main/jkube/dataverse-uploads-pvc.yaml | 6 ++ .../main/jkube/deps/postgres-deployment.yml | 31 ++++++++ .../src/main/jkube/deps/postgres-pvc.yml | 6 ++ .../src/main/jkube/deps/postgres-svc.yml | 5 ++ .../container-k8s/src/main/jkube/profiles.yml | 12 ++++ 10 files changed, 209 insertions(+) create mode 100644 modules/container-k8s/pom.xml create mode 100644 modules/container-k8s/src/main/jkube/dataverse-datasets-pvc.yml create mode 100644 modules/container-k8s/src/main/jkube/dataverse-deployment.yaml create mode 100644 modules/container-k8s/src/main/jkube/dataverse-docroot-pvc.yml create mode 100644 modules/container-k8s/src/main/jkube/dataverse-storage-pvc.yml create mode 100644 modules/container-k8s/src/main/jkube/dataverse-uploads-pvc.yaml create mode 100644 modules/container-k8s/src/main/jkube/deps/postgres-deployment.yml create mode 100644 modules/container-k8s/src/main/jkube/deps/postgres-pvc.yml create mode 100644 modules/container-k8s/src/main/jkube/deps/postgres-svc.yml create mode 100644 modules/container-k8s/src/main/jkube/profiles.yml diff --git a/modules/container-k8s/pom.xml b/modules/container-k8s/pom.xml new file mode 100644 index 00000000000..470abb753ae --- /dev/null +++ b/modules/container-k8s/pom.xml @@ -0,0 +1,59 @@ + + + 4.0.0 + + + edu.harvard.iq + dataverse-parent + ${revision} + ../dataverse-parent + + + io.gdcc + container-k8s + ${packaging.type} + Container Kubernetes Materials + This module provides resources to run Dataverse on OpenShift or plain Kubernetes + + + + poikilotherm + Oliver Bertuch + github@bertuch.eu + Europe/Berlin + + maintainer + + + + + + + + pom + + + + + ct + + true + dataverse-k8s + + + + + + org.eclipse.jkube + kubernetes-maven-plugin + 1.16.0 + + + + + + + + + \ No newline at end of file diff --git a/modules/container-k8s/src/main/jkube/dataverse-datasets-pvc.yml b/modules/container-k8s/src/main/jkube/dataverse-datasets-pvc.yml new file mode 100644 index 00000000000..50e9ccb3c92 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/dataverse-datasets-pvc.yml @@ -0,0 +1,6 @@ +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Mi diff --git a/modules/container-k8s/src/main/jkube/dataverse-deployment.yaml b/modules/container-k8s/src/main/jkube/dataverse-deployment.yaml new file mode 100644 index 00000000000..5d1ed67f635 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/dataverse-deployment.yaml @@ -0,0 +1,72 @@ +spec: + replicas: 1 + template: + spec: + containers: + - name: dataverse + image: ghcr.io/gdcc/dataverse:openshift-poc + imagePullPolicy: Always + resources: + requests: + memory: "1Gi" + limits: + memory: "2Gi" + ports: + - containerPort: 8080 + readinessProbe: + httpGet: + path: /api/info/version + port: 8080 + #args: + # - bash + # - -c + # - "ls -laZ /opt/payara/config; touch /opt/payara/config/test" + env: + - name: DATAVERSE_DB_HOST + value: postgres + - name: DATAVERSE_DB_USER + value: dataverse + - name: DATAVERSE_DB_PASSWORD + value: supersecret + 
volumeMounts: + - name: storage + mountPath: /dv + - name: datasets + mountPath: /dv/store + - name: docroot + mountPath: /dv/docroot + - name: uploads + mountPath: /dv/uploads + - name: config + mountPath: /opt/payara/config + - name: dvtemp + mountPath: /dv/temp + - name: tmp + mountPath: /tmp + - name: heapdumps + mountPath: /dumps + - name: bootstrap + image: ghcr.io/gdcc/configbaker:openshift-poc + restartPolicy: Never + args: ["bootstrap.sh", "-u", "http://localhost:8080", "-t", "3m", "dev"] + volumes: + - name: storage + persistentVolumeClaim: + claimName: dataverse-storage + - name: datasets + persistentVolumeClaim: + claimName: dataverse-datasets + - name: docroot + persistentVolumeClaim: + claimName: dataverse-docroot + - name: uploads + persistentVolumeClaim: + claimName: dataverse-uploads + - name: config + emptyDir: {} + - name: dvtemp + emptyDir: {} + - name: tmp + emptyDir: {} + - name: heapdumps + emptyDir: {} \ No newline at end of file diff --git a/modules/container-k8s/src/main/jkube/dataverse-docroot-pvc.yml b/modules/container-k8s/src/main/jkube/dataverse-docroot-pvc.yml new file mode 100644 index 00000000000..50e9ccb3c92 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/dataverse-docroot-pvc.yml @@ -0,0 +1,6 @@ +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Mi diff --git a/modules/container-k8s/src/main/jkube/dataverse-storage-pvc.yml b/modules/container-k8s/src/main/jkube/dataverse-storage-pvc.yml new file mode 100644 index 00000000000..50e9ccb3c92 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/dataverse-storage-pvc.yml @@ -0,0 +1,6 @@ +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Mi diff --git a/modules/container-k8s/src/main/jkube/dataverse-uploads-pvc.yaml b/modules/container-k8s/src/main/jkube/dataverse-uploads-pvc.yaml new file mode 100644 index 00000000000..50e9ccb3c92 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/dataverse-uploads-pvc.yaml @@ -0,0 +1,6 @@ +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Mi diff --git a/modules/container-k8s/src/main/jkube/deps/postgres-deployment.yml b/modules/container-k8s/src/main/jkube/deps/postgres-deployment.yml new file mode 100644 index 00000000000..c5290982642 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/deps/postgres-deployment.yml @@ -0,0 +1,31 @@ +spec: + replicas: 1 + strategy: + type: Recreate + template: + spec: + containers: + - name: postgres + image: postgres:13 + ports: + - containerPort: 5432 + env: + - name: POSTGRES_USER + value: dataverse + - name: POSTGRES_PASSWORD + value: supersecret + - name: PGDATA + value: /var/lib/postgresql/data/pgdata + volumeMounts: + - name: postgresql-persistent-storage + mountPath: /var/lib/postgresql/data + readinessProbe: + exec: + command: ["pg_isready"] + initialDelaySeconds: 5 + failureThreshold: 100 + periodSeconds: 5 + volumes: + - name: postgresql-persistent-storage + persistentVolumeClaim: + claimName: postgres \ No newline at end of file diff --git a/modules/container-k8s/src/main/jkube/deps/postgres-pvc.yml b/modules/container-k8s/src/main/jkube/deps/postgres-pvc.yml new file mode 100644 index 00000000000..9cefb651bd4 --- /dev/null +++ b/modules/container-k8s/src/main/jkube/deps/postgres-pvc.yml @@ -0,0 +1,6 @@ +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 300Mi diff --git a/modules/container-k8s/src/main/jkube/deps/postgres-svc.yml b/modules/container-k8s/src/main/jkube/deps/postgres-svc.yml new file 
mode 100644 index 00000000000..fc75438b31c --- /dev/null +++ b/modules/container-k8s/src/main/jkube/deps/postgres-svc.yml @@ -0,0 +1,5 @@ +spec: + ports: + - port: 5432 + targetPort: 5432 + protocol: TCP \ No newline at end of file diff --git a/modules/container-k8s/src/main/jkube/profiles.yml b/modules/container-k8s/src/main/jkube/profiles.yml new file mode 100644 index 00000000000..8443a9cf54c --- /dev/null +++ b/modules/container-k8s/src/main/jkube/profiles.yml @@ -0,0 +1,12 @@ +- name: deps + extends: default +- name: default + enricher: + excludes: + - jkube-volume-permission + - jkube-project-label +- name: security-hardening + enricher: + excludes: + - jkube-volume-permission + - jkube-project-label From b3321d4ad6760155672f67a95a4d9463eb5f3b1f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 9 Feb 2024 13:29:41 -0500 Subject: [PATCH 0654/1112] Add content to deaccession info message --- src/main/java/propertyFiles/Bundle.properties | 7 ++++--- src/main/webapp/dataset.xhtml | 7 ++++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 157f2ecaf54..34e16e36eac 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2002,7 +2002,8 @@ file.deleteFileDialog.immediate=The file will be deleted after you click on the file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button. file.deleteFileDialog.header=Delete Files file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. -file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public. +file.deaccessionDialog.tip.permanent=Deaccession is permanent. +file.deaccessionDialog.tip=This dataset will no longer be public and a tombstone will display the reason for deaccessioning.
    Please read the documentation if you have any questions. file.deaccessionDialog.version=Version file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? file.deaccessionDialog.reason.question2=What is the reason for deaccession? @@ -2016,8 +2017,8 @@ file.deaccessionDialog.reason.selectItem.other=Other (Please type reason in spac file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession. file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning. file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://... -file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public. -file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public. +file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? This is permanent and the selected version(s) will no longer be viewable by the public. +file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? This is permanent and it will no longer be viewable by the public. file.deaccessionDialog.dialog.selectVersion.error=Please select version(s) for deaccessioning. file.deaccessionDialog.dialog.reason.error=Please select reason for deaccessioning. file.deaccessionDialog.dialog.url.error=Please enter valid forwarding URL. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index e50e68ec162..2afae295082 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1221,7 +1221,12 @@ -

    #{bundle['file.deaccessionDialog.tip']}

    +
    +   +


    + +

    +
    Date: Fri, 9 Feb 2024 17:12:24 -0500 Subject: [PATCH 0655/1112] #10286 add owner array to file api --- .../edu/harvard/iq/dataverse/api/Files.java | 17 ++++++++++------- .../iq/dataverse/util/json/JsonPrinter.java | 17 ++++++++++++++--- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 5d400ee1438..155d8953d15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -504,18 +504,21 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa @GET @AuthRequired @Path("{id}/draft") - public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true); + public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) throws WrappedResponse, Exception { + Boolean includeOwners = returnOwners == null ? false : returnOwners; + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true, includeOwners); } @GET @AuthRequired @Path("{id}") - public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception { - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false); + public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) throws WrappedResponse, Exception { + Boolean includeOwners = returnOwners == null ? 
false : returnOwners; + System.out.print("includeOwners: " + includeOwners); + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false, includeOwners); } - private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft ){ + private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft, boolean includeOwners ){ DataverseRequest req; try { @@ -565,10 +568,10 @@ private Response getFileDataResponse(User user, String fileIdOrPersistentId, Uri MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, df); mdcLogService.logEntry(entry); } - + return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) - .add("data", json(fm)).build()) + .add("data", json(fm, includeOwners)).build()) .type(MediaType.APPLICATION_JSON) .build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 7d9bede9a61..d88015145b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -639,8 +639,12 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { return fieldsBld; } + + public static JsonObjectBuilder json(FileMetadata fmd){ + return json(fmd, false); + } - public static JsonObjectBuilder json(FileMetadata fmd) { + public static JsonObjectBuilder json(FileMetadata fmd, Boolean includeOwners) { return jsonObjectBuilder() // deprecated: .add("category", fmd.getCategory()) // TODO: uh, figure out what to do here... it's deprecated @@ -655,7 +659,7 @@ public static JsonObjectBuilder json(FileMetadata fmd) { .add("version", fmd.getVersion()) .add("datasetVersionId", fmd.getDatasetVersion().getId()) .add("categories", getFileCategories(fmd)) - .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd, false)); + .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd, false, includeOwners)); } public static JsonObjectBuilder json(AuxiliaryFile auxFile) { @@ -674,7 +678,11 @@ public static JsonObjectBuilder json(DataFile df) { return JsonPrinter.json(df, null, false); } - public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider) { + public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider){ + return json(df, fileMetadata, forExportDataProvider, false); + } + + public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider, Boolean includeOwners) { // File names are no longer stored in the DataFile entity; // (they are instead in the FileMetadata (as "labels") - this way // the filename can change between versions... @@ -750,6 +758,9 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo ? 
JsonPrinter.jsonVarGroup(fileMetadata.getVarGroups()) : null); } + if (includeOwners){ + builder.add("ownerArray", getOwnersFromDvObject(df)); + } return builder; } From 43f61e6334c087648476737dfc7d32ff5bc16d1f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 12 Feb 2024 13:29:34 -0500 Subject: [PATCH 0656/1112] #10286 add owner array to view dv --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 6 ++++-- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 8 +++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 6c1bf42c02a..66aec38adfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -610,10 +610,12 @@ private Dataset parseDataset(String datasetJson) throws WrappedResponse { @GET @AuthRequired @Path("{identifier}") - public Response viewDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf) { + public Response viewDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf, @QueryParam("returnOwners") Boolean returnOwners) { + Boolean includeOwners = returnOwners == null ? false : returnOwners; return response(req -> ok( json(execCommand(new GetDataverseCommand(req, findDataverseOrDie(idtf))), - settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false) + settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false), + includeOwners )), getRequestUser(crc)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index d88015145b3..6f750eaddac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -258,11 +258,11 @@ public static JsonObjectBuilder json(Workflow wf){ } public static JsonObjectBuilder json(Dataverse dv) { - return json(dv, false); + return json(dv, false, false); } //TODO: Once we upgrade to Java EE 8 we can remove objects from the builder, and this email removal can be done in a better place. 
- public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail) { + public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean includeOwners) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dv.getId()) .add("alias", dv.getAlias()) @@ -271,7 +271,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail) { if(!hideEmail) { bld.add("dataverseContacts", JsonPrinter.json(dv.getDataverseContacts())); } - + if (includeOwners){ + bld.add("ownerArray", getOwnersFromDvObject(dv)); + } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) .add("dataverseType", dv.getDataverseType().name()); From 1898c148512aec9943971f546c9dc7e53b537147 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 12 Feb 2024 15:39:10 -0500 Subject: [PATCH 0657/1112] #10286 add test for get ds api --- .../harvard/iq/dataverse/api/DatasetsIT.java | 28 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 ++++++ 2 files changed, 37 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index e1c4b901116..3703a0d39c3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1887,6 +1887,34 @@ public void testDeleteDatasetWhileFileIngesting() { .statusCode(FORBIDDEN.getStatusCode()); } + + @Test + public void testGetIncludeOwnerArray() { + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String persistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + logger.info("Dataset created with id " + datasetId + " and persistent id " + persistentId); + + Response getDatasetWithOwners = UtilIT.getDatasetWithOwners(persistentId, apiToken, true); + getDatasetWithOwners.prettyPrint(); + getDatasetWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(dataverseAlias)); + } /** * In order for this test to pass you must have the Data Capture Module ( diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ec41248a65f..0598bb80ea6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1476,6 +1476,15 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str + persistentId + (excludeFiles ? 
"&excludeFiles=true" : "")); } + + static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/:persistentId/" + + "?persistentId=" + + persistentId + + (returnOwners ? "&returnOwners=true" : "")); + } static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { return given() From f612f7a0e3dce2e7e4cb0a5664986adcd2868667 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 13:25:04 -0500 Subject: [PATCH 0658/1112] change flyway numbering --- ...straints.sql => V6.1.0.3__9983-missing-unique-constraints.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.4__9983-missing-unique-constraints.sql => V6.1.0.3__9983-missing-unique-constraints.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.4__9983-missing-unique-constraints.sql b/src/main/resources/db/migration/V6.1.0.3__9983-missing-unique-constraints.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.4__9983-missing-unique-constraints.sql rename to src/main/resources/db/migration/V6.1.0.3__9983-missing-unique-constraints.sql From f3fae4bf754572986fb815324a3a69a2632a87cc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 14:33:58 -0500 Subject: [PATCH 0659/1112] add clarity --- doc/release-notes/9983-unique-constraints.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9983-unique-constraints.md b/doc/release-notes/9983-unique-constraints.md index 1e37d75d88d..d889beb0718 100644 --- a/doc/release-notes/9983-unique-constraints.md +++ b/doc/release-notes/9983-unique-constraints.md @@ -11,4 +11,4 @@ and then removing any duplicate rows (where count>1). -TODO: Add note about reloading metadata blocks after upgrade. \ No newline at end of file +TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. 
\ No newline at end of file From 240e851c7ba43b5037097aa2f0e0e827f2564f12 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Feb 2024 16:38:03 -0500 Subject: [PATCH 0660/1112] Ingest/Uningest from file page --- .../edu/harvard/iq/dataverse/FilePage.java | 112 ++++++++++++++++++ .../webapp/file-edit-button-fragment.xhtml | 16 +++ 2 files changed, 128 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 479c8a429c6..b6706acd4ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -475,6 +475,112 @@ public String restrictFile(boolean restricted) throws CommandException{ return returnToDraftVersion(); } + public String ingestFile() throws CommandException{ + + User u = session.getUser(); + if(!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) { + //Shouldn't happen (choice not displayed for users who don't have the right permission), but check anyway + logger.warning("User: " + u.getIdentifier() + " tried to ingest a file"); + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning")); + return null; + } + + DataFile dataFile = fileMetadata.getDataFile(); + editDataset = dataFile.getOwner(); + + if (dataFile.isTabularData()) { + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.alreadyIngestedWarning")); + return null; + } + + boolean ingestLock = dataset.isLockedFor(DatasetLock.Reason.Ingest); + + if (ingestLock) { + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.ingestInProgressWarning")); + return null; + } + + if (!FileUtil.canIngestAsTabular(dataFile)) { + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning")); + return null; + + } + + dataFile.SetIngestScheduled(); + + if (dataFile.getIngestRequest() == null) { + dataFile.setIngestRequest(new IngestRequest(dataFile)); + } + + dataFile.getIngestRequest().setForceTypeCheck(true); + + // update the datafile, to save the newIngest request in the database: + save(); + + // queue the data ingest job for asynchronous execution: + String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(dataFile)), (AuthenticatedUser) session.getUser()); + + if (!StringUtil.isEmpty(status)) { + // This most likely indicates some sort of a problem (for example, + // the ingest job was not put on the JMS queue because of the size + // of the file). But we are still returning the OK status - because + // from the point of view of the API, it's a success - we have + // successfully gone through the process of trying to schedule the + // ingest job... 
+ + logger.warning("Ingest Status for file: " + dataFile.getId() + " : " + status); + } + logger.info("File: " + dataFile.getId() + " ingest queued"); + + init(); + JsfHelper.addInfoMessage(BundleUtil.getStringFromBundle("file.ingest.ingestQueued")); + return returnToDraftVersion(); + } + + public String uningestFile() throws CommandException { + + if (!file.isTabularData()) { + if(file.isIngestProblem()) { + User u = session.getUser(); + if(!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) { + logger.warning("User: " + u.getIdentifier() + " tried to uningest a file"); + //Shouldn't happen (choice not displayed for users who don't have the right permission), but check anyway + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + return null; + } + file.setIngestDone(); + file.setIngestReport(null); + } else { + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + return null; + } + } else { + commandEngine.submit(new UningestFileCommand(dvRequestService.getDataverseRequest(), file)); + Long dataFileId = file.getId(); + file = datafileService.find(dataFileId); + } + editDataset = file.getOwner(); + if (editDataset.isReleased()) { + try { + ExportService instance = ExportService.getInstance(); + instance.exportAllFormats(editDataset); + + } catch (ExportException ex) { + // Something went wrong! + // Just like with indexing, a failure to export is not a fatal + // condition. We'll just log the error as a warning and keep + // going: + logger.log(Level.WARNING, "Uningest: Exception while exporting:{0}", ex.getMessage()); + } + } + save(); + //Refresh filemetadata with file title, etc. + init(); + JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("file.uningest.complete")); + return returnToDraftVersion(); + } + + private List filesToBeDeleted = new ArrayList<>(); public String deleteFile() { @@ -948,6 +1054,12 @@ public boolean isPubliclyDownloadable() { return FileUtil.isPubliclyDownloadable(fileMetadata); } + public boolean isIngestable() { + DataFile f = fileMetadata.getDataFile(); + //Datafile is an ingestable type and hasn't been ingested yet or had an ingest fail + return (FileUtil.canIngestAsTabular(f)&&!(f.isTabularData() || f.isIngestProblem())); + } + private Boolean lockedFromEditsVar; private Boolean lockedFromDownloadVar; diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 4dac1613266..e08de716cda 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -77,6 +77,22 @@
[16 added XHTML lines defining the new Ingest and Uningest entries in the file page Edit menu; the markup itself is omitted]
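For reference, the visibility of the new Ingest option comes down to the isIngestable() predicate added to FilePage above. A minimal sketch, using only the DataFile and FileUtil calls that appear in the patch (this is an illustration, not the actual page code):

    import edu.harvard.iq.dataverse.DataFile;
    import edu.harvard.iq.dataverse.util.FileUtil;

    // Sketch only -- mirrors the isIngestable() check added to FilePage above.
    class IngestEligibility {
        // A file is offered for ingest when it is of an ingestable tabular type
        // and has neither been ingested already nor failed a previous ingest.
        static boolean isIngestable(DataFile f) {
            return FileUtil.canIngestAsTabular(f)
                    && !(f.isTabularData() || f.isIngestProblem());
        }
    }

Conversely, the uningestFile() method above only proceeds when the file is already tabular or has an ingest problem recorded, so the two actions apply to disjoint sets of files.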
    From fcdc24611d26889ba32fda351490c0ae657aef7e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 17:00:07 -0500 Subject: [PATCH 0661/1112] missing imports/@EJB --- src/main/java/edu/harvard/iq/dataverse/FilePage.java | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index b6706acd4ff..4e5843964e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -21,6 +21,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.PersistProvFreeFormCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.export.ExportService; import io.gdcc.spi.export.ExportException; @@ -28,6 +29,8 @@ import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; +import edu.harvard.iq.dataverse.ingest.IngestRequest; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; @@ -35,6 +38,8 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; +import edu.harvard.iq.dataverse.util.StringUtil; + import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -45,6 +50,7 @@ import java.util.Comparator; import java.util.List; import java.util.Set; +import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; @@ -112,10 +118,10 @@ public class FilePage implements java.io.Serializable { GuestbookResponseServiceBean guestbookResponseService; @EJB AuthenticationServiceBean authService; - @EJB DatasetServiceBean datasetService; - + @EJB + IngestServiceBean ingestService; @EJB SystemConfig systemConfig; @@ -209,7 +215,7 @@ public String init() { // If this DatasetVersion is unpublished and permission is doesn't have permissions: // > Go to the Login page // - // Check permisisons + // Check permissions Boolean authorized = (fileMetadata.getDatasetVersion().isReleased()) || (!fileMetadata.getDatasetVersion().isReleased() && this.canViewUnpublishedDataset()); From 15ae19e36250e3a467452cfd41287df1cfe8bd3a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 17:00:28 -0500 Subject: [PATCH 0662/1112] Change command to publish perm --- .../dataverse/engine/command/impl/UningestFileCommand.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java index 3e85630dd59..e9791809cb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java @@ -33,7 
+33,7 @@ * @author skraffmi * @author Leonid Andreev */ -@RequiredPermissions({}) +@RequiredPermissions(Permission.PublishDataset) public class UningestFileCommand extends AbstractVoidCommand { private static final Logger logger = Logger.getLogger(UningestFileCommand.class.getCanonicalName()); @@ -48,8 +48,8 @@ public UningestFileCommand(DataverseRequest aRequest, DataFile uningest) { protected void executeImpl(CommandContext ctxt) throws CommandException { // first check if user is a superuser - if ( (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser() ) ) { - throw new PermissionException("Uningest File can only be called by Superusers.", + if (!(getUser() instanceof AuthenticatedUser)) { + throw new PermissionException("Uningest File can only be called by User with the PublishDataset permission.", this, Collections.singleton(Permission.EditDataset), uningest); } From 262fb267a2025872d8f537e937ad31dc0a25a156 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 17:49:09 -0500 Subject: [PATCH 0663/1112] superuser only in command, add docs --- .../user/tabulardataingest/ingestprocess.rst | 20 +++++++++++++++++++- .../command/impl/UningestFileCommand.java | 10 +++++----- 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index 33ae9b555e6..9e82ff12b9b 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -32,7 +32,7 @@ format. (more info below) Tabular Data and Metadata -========================== +========================= Data vs. Metadata ----------------- @@ -56,3 +56,21 @@ the Dataverse Software was originally based on the `DDI Codebook `_ format. You can see an example of DDI output under the :ref:`data-variable-metadata-access` section of the :doc:`/api/dataaccess` section of the API Guide. + +Uningest and Reingest +===================== + +Ingest will only work for files whose content can be interpreted as a table. +Multi-sheet spreadsheets and CSV files with varying numbers of entries per row are two examples where ingest will fail. +This is non-fatal. The Dataverse software will not produce a .tab version of the file and will show a warning to users +who can see the draft version of the dataset containing the file, indicating why ingest failed. When the file is published as +part of the dataset, there will be no indication that ingest was attempted and failed. + +If the warning message is a concern, the Dataverse software includes both an API call (see the Files section of the :doc:`/api/native-api` guide) +and an Edit/Uningest menu option displayed on the file page, which allow a file to be Uningested. These are only available to superusers. +Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. +This will remove the .tab version of the file that was generated. + +If a file is in a tabular format but was never ingested, e.g. due to the ingest file size limit being lower in the past, or if ingest had failed, +e.g. in a prior Dataverse version, a reingest API (see the Files section of the :doc:`/api/native-api` guide) and a file page Edit/Reingest option +in the user interface allow ingest to be tried again. As with Uningest, this functionality is only available to superusers. 
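The API calls the new guide text points to can also be driven from a script. A minimal sketch using java.net.http; the /api/files/{id}/uningest and /api/files/{id}/reingest paths, the X-Dataverse-key header, and the example server URL are assumptions taken from the Files section of the native API guide, and both calls need a superuser API token as described above:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class IngestAdminExample {
        public static void main(String[] args) throws Exception {
            // Assumed values -- replace with a real server and a superuser token.
            String server = "https://dataverse.example.edu";
            String apiToken = System.getenv("DATAVERSE_API_TOKEN");
            long fileId = 42L; // database id of the data file

            HttpClient client = HttpClient.newHttpClient();

            // Uningest: clears a failed ingest or removes the generated .tab version.
            HttpRequest uningest = HttpRequest.newBuilder()
                    .uri(URI.create(server + "/api/files/" + fileId + "/uningest"))
                    .header("X-Dataverse-key", apiToken)
                    .POST(HttpRequest.BodyPublishers.noBody())
                    .build();
            System.out.println(client.send(uningest, HttpResponse.BodyHandlers.ofString()).body());

            // Reingest: retries ingest for a tabular file that was never ingested.
            HttpRequest reingest = HttpRequest.newBuilder()
                    .uri(URI.create(server + "/api/files/" + fileId + "/reingest"))
                    .header("X-Dataverse-key", apiToken)
                    .POST(HttpRequest.BodyPublishers.noBody())
                    .build();
            System.out.println(client.send(reingest, HttpResponse.BodyHandlers.ofString()).body());
        }
    }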
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java index e9791809cb2..ba04c4d7931 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java @@ -33,7 +33,7 @@ * @author skraffmi * @author Leonid Andreev */ -@RequiredPermissions(Permission.PublishDataset) +@RequiredPermissions({}) public class UningestFileCommand extends AbstractVoidCommand { private static final Logger logger = Logger.getLogger(UningestFileCommand.class.getCanonicalName()); @@ -47,10 +47,10 @@ public UningestFileCommand(DataverseRequest aRequest, DataFile uningest) { @Override protected void executeImpl(CommandContext ctxt) throws CommandException { - // first check if user is a superuser - if (!(getUser() instanceof AuthenticatedUser)) { - throw new PermissionException("Uningest File can only be called by User with the PublishDataset permission.", - this, Collections.singleton(Permission.EditDataset), uningest); + // first check if user is a superuser + if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) { + throw new PermissionException("Uningest File can only be called by Superusers.", this, + Collections.singleton(Permission.EditDataset), uningest); } // is this actually a tabular data file? From 130cfba92e9f3ced2e9497ba74b2f17b20bfec77 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Feb 2024 17:51:37 -0500 Subject: [PATCH 0664/1112] release note --- doc/release-notes/10318-uningest-and-reingest.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/release-notes/10318-uningest-and-reingest.md diff --git a/doc/release-notes/10318-uningest-and-reingest.md b/doc/release-notes/10318-uningest-and-reingest.md new file mode 100644 index 00000000000..7465f934330 --- /dev/null +++ b/doc/release-notes/10318-uningest-and-reingest.md @@ -0,0 +1,2 @@ +New Uningest/Reingest options are available in the File Page Edit menu for superusers, allowing ingest errors to be cleared and for +ingest to be retried (e.g. after a Dataverse version update or if ingest size limits are changed). 
From 67d004fb5719f94389610227e070494f0d652ecd Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Feb 2024 22:16:33 -0500 Subject: [PATCH 0665/1112] add redeploy tab for Netbeans #9590 --- .../source/container/dev-usage.rst | 58 ++++++++++++++++-- .../source/container/img/netbeans-compile.png | Bin 0 -> 99396 bytes .../source/container/img/netbeans-run.png | Bin 0 -> 124521 bytes .../container/img/netbeans-servers-common.png | Bin 0 -> 89185 bytes .../container/img/netbeans-servers-java.png | Bin 0 -> 68487 bytes .../source/developers/classic-dev-env.rst | 2 + 6 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 doc/sphinx-guides/source/container/img/netbeans-compile.png create mode 100644 doc/sphinx-guides/source/container/img/netbeans-run.png create mode 100644 doc/sphinx-guides/source/container/img/netbeans-servers-common.png create mode 100644 doc/sphinx-guides/source/container/img/netbeans-servers-java.png diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 9fc9058eada..85b1b3e5f05 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -154,24 +154,29 @@ IDE-triggered re-deployments You have at least two options: -1. Use plugins for different IDEs by Payara to ease the burden of redeploying an application during development to a running Payara application server. +1. Use builtin features of IDEs or plugins for different IDEs by Payara to ease the burden of redeploying an application during development to a running Payara application server. Their guides contain `documentation on Payara IDE plugins `_. 2. Use a paid product like `JRebel `_. The main difference between the first and the second option is support for hot deploys of non-class files plus limitations in what the JVM HotswapAgent can do for you. Find more `details in a blog article by JRebel `_. -When opting for Payara tools, please follow these steps: +When opting for builtin features or Payara tools, please follow these steps: 1. | Download the Payara appserver to your machine, unzip and note the location for later. - | - See this guide for which version, in doubt lookup using + | - See :ref:`payara` for which version or run the following command | ``mvn help:evaluate -Dexpression=payara.version -q -DforceStdout`` - | - Can be downloaded from `Maven Central `_. + | - To download, see :ref:`payara` or try `Maven Central `_. 2. Install Payara tools plugin in your IDE: .. tabs:: + .. group-tab:: Netbeans + + This step is not necessary for Netbeans. The feature is builtin. + .. group-tab:: IntelliJ + **Requires IntelliJ Ultimate!** (Note that `free educational licenses `_ are available) @@ -180,6 +185,28 @@ When opting for Payara tools, please follow these steps: 3. Configure a connection to the application server: .. tabs:: + .. group-tab:: Netbeans + + Unzip Payara to ``/usr/local/payara6`` as explained in :ref:`install-payara-dev`. + + Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6``. Use the settings in the screenshot below. Most of the defaults are fine. + + Under "Common", the password should be "admin". Make sure "Enable Hot Deploy" is checked. + + .. image:: img/netbeans-servers-common.png + + Under "Java", change the debug port to 9009. + + .. 
image:: img/netbeans-servers-java.png + + Open the project properties (under "File"), navigate to "Compile" and make sure "Compile on Save" is checked. + + .. image:: img/netbeans-compile.png + + Under "Run", select "Payara Server" under "Server" and make sure "Deploy on Save" is checked. + + .. image:: img/netbeans-run.png + .. group-tab:: IntelliJ Create a new running configuration with a "Remote Payara". (Open dialog by clicking "Run", then "Edit Configurations") @@ -212,13 +239,32 @@ When opting for Payara tools, please follow these steps: .. image:: img/intellij-payara-config-server-behaviour.png 4. | Start all the containers. Follow the cheat sheet above, but take care to skip application deployment: - | - When using the Maven commands, append ``-Dapp.deploy.skip``. - | - When using Docker Compose, prepend the command with ``SKIP_DEPLOY=1``. + | - When using the Maven commands, append ``-Dapp.deploy.skip``. For example: + | ``mvn -Pct docker:run -Dapp.deploy.skip`` + | - When using Docker Compose, prepend the command with ``SKIP_DEPLOY=1``. For example: + | ``SKIP_DEPLOY=1 docker compose -f docker-compose-dev.yml up`` | - Note: the Admin Console can be reached at http://localhost:4848 or https://localhost:4949 5. To deploy the application to the running server, use the configured tools to deploy. Using the "Run" configuration only deploys and enables redeploys, while running "Debug" enables hot swapping of classes via JDWP. .. tabs:: + .. group-tab:: Netbeans + + Click "Debug" then "Debug Project". After some time, Dataverse will be deployed. + + Try making a code change, perhaps to ``Info.java``. + + Click "Debug" and then "Apply Code Changes". If the change was correctly applied, you should see output similar to this: + + .. code-block:: + + Classes to reload: + edu.harvard.iq.dataverse.api.Info + + Code updated + + Check to make sure the change is live by visiting, for example, http://localhost:8080/api/info/version + .. group-tab:: IntelliJ Choose "Run" or "Debug" in the toolbar. 
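To exercise the hot-swap flow described above, any small, observable change to a JAX-RS resource will do. A purely illustrative sketch under Jakarta EE -- this is not the project's real Info.java; the path, class name, and returned string are hypothetical:

    import jakarta.ws.rs.GET;
    import jakarta.ws.rs.Path;

    // Hypothetical endpoint used only to demonstrate "Apply Code Changes":
    // edit the returned string, save, apply, and re-request the URL -- the new
    // value should appear without a full redeploy.
    @Path("hotswap-demo")
    public class HotswapDemo {
        @GET
        public String ping() {
            return "hot swap works"; // change me and apply code changes
        }
    }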
diff --git a/doc/sphinx-guides/source/container/img/netbeans-compile.png b/doc/sphinx-guides/source/container/img/netbeans-compile.png new file mode 100644 index 0000000000000000000000000000000000000000..e429695ccb01c0170ee612ef1e701aebb68029c7 GIT binary patch [binary data for the four Netbeans screenshot PNGs omitted]
zFr_du=zyaZ!z6bYql1z+)kYR8{kXED;@1`Rm#TZxQ`b*zp|kEv??;#C>M{TPM@0B8 zu@{Xf$Qh-#Ahh>o5F(fo@e=zkdS1pOvM252$$Gk0q0uK#F81(%CkOu=Yf4ZVheeCF z)2iTE05ezJ7C@Ff_q2d3*~%cj3rVW}R8!NV59eB4)v|88pYD-Y zT8){^Y)8jTDz&db>_R1ARy88DO!fQ3AhCsFl0?@-w@KH*pqw3i!iqLx2_TCCX(VJL zIIKbd?H%fvefdMY{&wZot>%`YFsXDHPG5^Z^~R3o2W?8wDtxm+cTWW$o*w1=@S4CS zK|!=d?1QVP^61u^s;W3{JY5w=W_qyaI{f=DdwhYsx)|Tp@Rg-FwX^q4?3DE7`VRAw zOSvH$1qr`4W@KlpvEo$g@$E;C_5)8SsOa1c^*;|GKDOv1YBV-7Tu1!-ilHC@haICW z6*$5OxcOa;*pJ!Lz#zHuk)AyJy2Ez@qTuBkw14MpTpgK9cC88A?I~!&w1Off@$Lf!=Nw{J%xxzdO7((o!`TM1;QjlI! zL0WOfw!JRfkzjsh^xr);H8VBP^!C@sdv2w4s2STgs<-O(;j%+S%qbN9Fibcm;%+4P z0muSN4MF5YnvJuYTiVxo3aB*q4D&s2AF~1)&X^x>{d&az^Zl0DoXF74|K-IEid)<5 zWzCQdFGp-n{S;J%s)K%rf(j6u?qlJD_zk>Mz80UeD|8TXVZ)QRb%zc&=+&-~mPYV| zK|x>AbIW>P$U%E>cNFI^A=ROZm03LHq9G%9;kpTVL0nQ~F&SiU;T~cwCp>Q9Vbgs5 zk9`3~GsXSN7kF;;s`kv@2dqrdF0q7X)j7?Z&7>1y1Xk>Lea zv!GHCRtn<_e^?7V;wuaFAwmhu5Sj;#>=y2?{0YdgmqD00do93>UcdnA0+cH31)3<> z3#@2jf8<(`6vLwjCA=aQ6;$x=KYg0VHMoFv23J}Z>9F{Vv$e z*!1JOxsCicTv8jfN5 zASMCCQduSfLQS}qc3mj3>(?_|x`o}JJX`*OFzu)Uu!MiY@xua--8P zef=XNzd03Y6gTwaJBSg{K8>|&pJt72l<2(W+0LaaL5WZ@+|qw8710@KMF3MoFDR^lb$K+|pdwh@Jbd_2_^=OmYKP9|-@yxjd8W=7 z0UQg8EH)3or$S=@yW9)=+xwB13PiGQJ$o9*O`5g-IO-&rPJ{w<@gQ3ab_xw!&4@A6 z8ulEHCp7RD6jy+_GaYJ~%rfUB$xhE9<#OxQjZgMHe)#CoF|BTrXSY7_L5qWB&6ei% z*m`UcVHvGX?)N#kSlz;*^f6C+^ z&06m2674?VL06YW-L;<=ocVI1X@lzYmu6i$T4gO+`ZRmA6ySDu{vm7YA-3Dj^O zS5VMW*@f}zYj-x!979*kVEh>raQb=NP{lz}IBL7sm)@(a`J>0+SQE(^=crON5xd&v z?Zr=s(8TaoNO|DaAZw#ZQxpk8n-NA4+$Bb1il6Q~y zlPXk=gTx$k{;hD?3Qm;JW4jU`RaQ?yF4@|wnf0Kr?PLjwP*XuNIl|e*ddON=HPXk` zY`GPxr~NKE^}0A~^O&?;<=*bnoP7RB%Pq8Pi`Pxp+UTlyI?^oID%2d(19J>AY>V+tf6uN3UL}4!3OaTdUR-f*4PY zqIbZlQ#D6J_|Z~dm%GiNQE|0;v( zL<1aE%w1k^uE!j|=PsORczTRtr5qA^F@cgd=}s`j2x|*Nhu>Np2Ef9S|a(hnqwe0{nMitO}+| zr7v_e-1L|OqK2nrg^muUI(^h%*y{H%QKWuhTr-xHjT~K0DijE4{cTuChbR)Rn-unN zcF~^S}ozv%lPZ#(#MC2DzP2E zObrZ-*4UTmMS(lJPmi}wbglOl@s>goY$}R=Q}^SyMX$VxFvBycM>Z*sdAY6 ze0j0Ugz?B+y5^e4?V?PDLu>&jQgqO$Z)A(S*ogu!r#r4W{VVLRtp6#OPjgNly=Nyr{>BP4Q_xBLefib{x}->v4Xya>crC zEv;SVUYeri?wuHVui?CZG)G^o9pg86OQG(7vHCxHAEqN18C_u8I{8uQI@eam6z4p+ zjdZqWm{oR=x^b%qtL>vsr^#0CI=gcBJd~lYko-=nsK$z-{~KYOuW`-O{E?h2-To zFjUKCMn#5lr*WxK-L~a>?_PYxNROv_?97SAYHDgj9zIh2xyJzR9YgWyydm|QukJ=R zW1il+&7lU@RwtYtb1L>*{ERU-tpBNR@lLaAqkfCyIuXO5)2XsB^?q@&y`$p>HdawV z&4Clmai_EC9o7R{CbME6X4!DKMjpS1R8;F~9FM=h&Q15fTv|VW8xXWB_5AvjXZa)M z%xTqq*OZ+*eL}B{3)~rc;IY%RU2G*2nN^=(KhsB*eJ5z6pe!1VmxEF&B4d*yGbQ_Z-Z`~IZ#60ZGv7Wgz zQd;|eI-96z7klaXGZ9IOw zJ2kHiL#Pu)j>iB(81Jk*DN6m`U~T0Dn^E81S^dgvQy<_a#koRl{H#-M7g`_ojk$6x zUn%&)*roQP+bXv`T%>j;Ik{W+-Yra9Ywp5n{Dx5z*NI6f9Gr&adMZ3vBZubSJ|HD3 zCHQl=Qfwta?o}r5iLW4}6Z!?g14REwr6n>n%2Cm*VL;HALQ!n4VLC1%Rp)mF2!Vg zAzwoO7sfWU44JKhED+a0zz$z6XDbdM2Kz+4FQ$UIietwi-Cw8oq=m2gk=mwH?^Kf+ zlQ*8K9AVyVYh{A9T~)*y7k1nyDmFbjraa37Flmi!VVm zMv>i_=`+=`(5;1mL?wcpO9Nstu1oYK!ULxC5!XZ*d_ZZ9n>7=Hxh&rSrxW4`v!Na8 zQAYMHIa_?XwaQyfD}UDX$+G>l=VsJNw}A%R%SQ#Nj~cZWT69{eFS2XVg263hdPW_3 z=Iq%rm?`44gyBjbbb})3uB^eUyCLZs4v7&2Qz)tt47Y_r0V7wTTI6tufZgHALGm00 z!-u%$_5ai@9U5_G`p!z9C-w`X%5#hB{oMHKM)@JQCfassY}sqP+A1b1H@6ud zUO^nq<=jB%!1iT=)hlra6H-3E8{&}(*a0dD1*%^?QUt$ChKO(k=wWE|-hcRT{_$kx zABw7jeP)hHlb4gBILY(t9J|+P0guxRH!PizrT(MHD$8}@Le)tcu?5ah)E;S6?hg+K zh^lv~urL&WqUg(GY6LJ8)k+8~vEH5y{nl~RHEu2U1up`@0X{wVxIjr>{nK~TZN?QE z9DMigj@*!=T{sO}KrJ~xQ>RbA`&5);B9*;pYv*tg04E}^hrd;fTj8C+nvj?}2bA45 zMd23ZJ@S_xj6aob=KG&_g|CM7X*-?Qm6a~(U{9aSqYH}uGhm;2?Q*LL7v2~ZJu`Zr z=k}_(QRgq_^u$m;;IuDUvSeKwOY$kjm|&&gK}daZ7pFHtKfKz%-i?xyFra1!2Zwvw zZkn_Kpcsi(t^qaDKhM?ET#ELdm%u;scb6KnF^dVJjeFzA$Vy^NYMp=E<&&YVTM zgmiN3KgL00I43iXP0F}_y$Q1+ej@{R%CS8dfc6ZXXdDFH3g}VYc)IqU(6%q&Q5ptj 
z7Q0y5`L3V@WDI4*Nj-=8l+rUz+^{$~JA=uGh|;P&+Gd z{%eDYL!Cb#j9i)UV5M=VqM7LxHE|2Ia`gn;zo z2u=F=a(L}XU@*oWQXoSF5yd1C{SU_gyDG$lb5Z_?l0*#QCs>}n-9+-tUti2MPwlL< z?A|1I%ZSA_77a=vz=#`uymkVHvO?5%*gkY4^*arY5c7nffubGi%yE;0S$J1LO%}>1 zM+|N=0TKTHXjT*oh6K@vY8y?Kf}k+69~SW+R$hqymE7m)?bU5HU%9U(GB3IxK^maK z{$}O`WXzTm1yG0m+7XMi7OZz6UqUZ9Ou8L7cyLk6U{=py>j+^2!=T>;7bJC;u3TA1 z+E#tr65nN%P{V#ykEYi@xAT50v@M$m0;;=j{;S!I{DVEupon0>4V)H?igL5p3>b*t4IezTe@TV zW4=NlWEq~{k=8q%2&CfLu=_tR)}jH-iyT}yeL&XojWYEX#bv1lWT}QO!>%5EoM3a} z?+vY1guof={x%pm(p)g&Mv7aWMCWbt3Mo-v{&B+kPiMFJKbju?C~O;PC`YST-h#8Z zP1xk6aq@XXnEd=jQ^%lOu2i3PB80+To?HdyB=b6m1XHzA_3zYW|ox9 z>bU|iB+ov>pZo%Drt-VeOip4(hC|Yds!RfRrYCi;*9xs0g9n+q+9EZ9U=P9c1-4z_m>dy0X5xL;n|654)bD z#n4fkHn{%LGGG6yYa4s?j(s|&)VOI=|EbZYV~o~6fBib|gzulGT==~JIV3s0lW41O zj~yt+RD`4~VlM?g<>E3f?rS*YB8w-1@tu1U7kE$^vUCD-hbvIa{sM|D<%&L+wghpB zEXc*w<_>I0NuW6Ld`++YKD=YYaOy?P9HM0&55b)8NNIS4J*Q}IBzxl_^9bS9yJHXP#VfV6ZoZ`#AD9FQbg#VxAQD#Rww9v?g)z_*TgD zsadmb1Ock(GyFE}8@}-U#Rw@Rg!nT70Fm) zzcnuPaNvc*SC?A)7#bV*}f z*)`8k141&_Tnvz0dMro`3ixpyfc35^1;VJcDxJt~G=LGc1;Sm!(%9xG3PuWQUog01anXXRTI}M+LM@O%a z30qnsq6*|Q!^G9Z35bKuTuQbm;hV)5gx@dQU&dh)y=S=JFg~@OXyPdZW`4WFl9N8| zfFr5Cv~g^kJCryX-%Ev%$TMDJqpsXC^d zUelyP(|uBU=!^0f(`=_r85}$H!^dX2R{MHs8SY(qa@wdBa8e{3!~V(KCkUf_{{B^GlwA%&wS{addUg&7K4HuhJljTxKg~M! zDdI;(t3KMw)=*3t6UZa zY@$;~0-a#DtVFZf1h)}J%=3lvdSwtVOk>@gC5vDSH9E2WJ_;VP_iT^9e zxq!YcTeiF;wP5+)`68f}TqACggfq?~Pwr@<3Eov8p&;r^#;#DjY+3w)8Z4gnlC-F~ zE9zF5I`I}hq4aV*Ep@&kV|Yv16#Y zZU|Ey9SiUT7q}Hpi+pXyJQK!eiSHVnoXF*+;-m}OY!zcxaU4AdQzlB>Nh}qj;6#&? zg&(DmO8HPS!KYn#it>QKD`l_1j+CnANBjh2N)ukx@=YRyKqRfI6 zpNCW5c2OkE8QYHhYt#0QW9K_=NSmm({=_FKcSEz$YWsHH3@wJ_9x1mb<=sj zW{EVH{WpZ2vaX*IyNFGLxT{TMHKSum^6d1YGNF1Gd!E<}ADRj=1GDjr znzs{1>VVM}7pnrG!EaIB7U>F=Wa-kZ!a_5{-r8MHFSmB)LIMv*Uv+zn1dSfYtaM;~ zk!&Z-H;L5FO=3X74ZTO|bi9Zu1}wK6{ZQlN0*4{9ic4R=zV4d*=var`xq5I^L@~&u zb3n1Zp)FlKG2L7-dL@-NF8ec*Vyb#+J})_PU};Q$?%R#nCPw}~nXnZb!x&8$NmY0aA`DmMJ3GH&pYA=buv z)ahg!m>^=C;N*1T?k_I>*Psv7*c6GUTU8d{ol-~Pin;qznfj2jD(7YKs!9q*TE-q_ zz(!w!o{Xpc+P6o|ZT5nwVZ}FF+m|grl;^a$aSKeqc7%ooMkg@KTjG|uAqA0zv^Mm& zvgA|U_y?^Ib84$CzNd<0vd9pzx{lBH(#3D|&&N|sdKc|3NLMj-1}`0O=-fYX<|q59UG(*AS{2=-$K!Cf9z`8U z633fYE$H;`hLasLW{6Bo){xAhN)r%wHJ!O0R*3+3@N*sbr@X%&YrjZ`otYVFwDcS| zT^t_zcvYB+E*}U%P#Q>(6nKN6f5DaD^*u+ma-7h|)7!ypi0&uWTcxS)m5}fmeku?Y zElxD7xreQZLaT9B@}@#72E|9&1lI;m`@HX~lb!MCUm>O{7tPmN#gtREMo_g&!lB(J zbbEg|GvH2Npx3NI7>7qUAEiR|^VGhDy@b9Hoc&7d z0fi66M?xW74g06Z_A?FZ?X*pGZ@fkp$ln|_@p{5tr#X+O`zBWQieCQw*GzgRnFR*~ z#Kb*D4F&f^KgGlg0QnrtID&5@YT*{nFi>mr{C@~Db)a^7kn-T&v5`$(Po&tfx<0#k}Jb7@&c_} zdqxpR)`&DoM&}?5!oTa){xyR`%nU{_Fn;$IP+LBwQ?Gp!Th`k6e;F__LwA6OjYE|0 zeES7gy?pzlhPxKmW(%IBrIpV|TR1I_XtM#>;UjhmalYS|GTp}~E0fw@h}1YGP?)S| z@(N7>X``%Z#Cy1I?GQp@F1$Fvfq{QSB7<4a(cqt8M|G8ls_ItlT(;QBj!=^A-Bxle-K6Lre04R-Fgn|kT>z1IyIHnQ*E-f;Pwfiq`@ zj2UZmq`)>!EgjLRC>}vkw;&`#0wv~^xN8E+gWmFAK-IsL`u7&-4E)p{(Xy1EOcBK! 
zOswr0lHvB2K7A==HsR8d4vd%_hgYOrc{DcS#|j`yTZ$9=co41$!j*wa&3_$(&o zkCJ%OY>#jHJ((&$9%+%NQ)1W8f2n8xluU+whI^MZUDVse+9uraV917;{W_ZhJ6y>w zQ9}~f=yPu!9qB1v^74q5jaKs|`T?21R1$2HGbzS#ETeggPVTo$1teT?%ixs2022>S z)2j=`sG|DQC!@&ewAWi{`9N~#VG_XS*K0asrJ&FOvp)d|R!^pB8~7$ri5 z-9}kJIQ{WCz19(2On9IykP76?ixe5GFN@V+BS~uF^dg2(IHCY^=n;##ZI@|LBqA=W zh&sNEF|^=Gg0N9G2<8AP0}p5qlsG1zGU$}vk(np7`Aiw+@=ze)ssg#e_E?9{?FMRK z!LV6+n?u_Ilqc98rMmL3 zm^#X-ZV^NE?MSkt&SS5mQG+9rdj2|Zl>)W4i|e}k-qoGwS&E}2_i9XPA4q)=3m}T{ zPY7ZTKVf`HaYj!fOcsVGtflyhOiwbC&hvfMUB|gJ+vU$FYx*Y{34X~Q5+y$mw#(H3JAP{OC(U>v1?d^NN_wIC^;!vZ+cHG=oE&t8@^l$3*+?(C{58T7x9HQjzM@2YU&=;-x zTBbtcTu(zrV@gXLVHRx(`x@^pam!)$MhkI`^Y?{ufiPC#7abJ7mA3IA#-$;apI5i&B!WCC-n62@vcijTmuB zqj1&llA*R;MVsW-I+F~noKRa$Hf$e1)*{*W!o~K3mMLiNYVYDwX*79l188~DON!!$ zE#TXz`o(fvejaTd?A3BMP6{>(WuhA1PU*XMx9hhUa)~`H?F`&Hh}#UkiMZi0&xo4$ z@*3WyO>l7^?S!29!NM$9Z`8J2DkSaQfeoa1R#e-_V`g;noIQ43h71W8_CFC%g>MGe za$Z7vM9KWAEaaY}=;@fvWzuhpsWWq%I$z_&1y_LEZcv%kIlzKX0yyA1vR|-SNPKuy zjfu6rSpDCBR|^Z3#lZl56D4srh&sC|yKrt%80ukGCbrbvCmPt{gcBhLQVL@D$qSUg zgWX;;@Hrm%hLX*7IjnYJ#n$mfZTpA$dsN+DqPSu5PG{E{-`|dOzIx>3r{X*I4~%SF z_p9!}8PT3Lp`+D74a*>Pn1@+<{JmCr&1&Rd6@CdLnH+2_q5rbc9=lSqb2iPyeeplL_7A<1qY$Nh; zpdO9?9|)ItMKMAW$p##c8@QU7px)-4;nyUTco|d42_oVUCB*WcS*F<9GZZH3fWwQ$ z6svhA(4$lL;o)2jk>8U7THY+Crv!Mun6Q!VMa}5W_a4Xqj`%49PGR@*BgA%^B2vhk z?2yFY;tmBp(E)>+ZoGd<99SdJGLPj$3q|LklCrYw5|?cjss{W&s3Yh zrIv^&pguE@@q>Imycnt5)WP#?rfK$|ezl%G7u=|4{&dA73)Qf8p9UwwMwwxaIWLTU`Qel_WFY2Fb~2;xYW-1wHozspW5Jw?lXVS0_n?Q+)`mD_He%S5_sVK%WB-)?iyx&F4=lbpYDN>+sGXIn6I~6I&K>yt=P$1H>*O^yE!C{Q zmA`($+-v85B!}HSUNZfbm&$|C#L9vzuLf+S(q@PrY`lp8s5norf7ne<0&EWU>HK|Y z=}{%x1g1BPeC@kN{I2!<2I*v0JA(>7Fb>ukP%+-$G{DiZEPVVBk9y)l3SuY$b1vo~;t{JwrfvAH7d1N?BY^meAhV0&b z`~tpGTVH7Y{@uS^`|GPr^1Sovs(w-Nkw^EZINe>gksHnIvN;$jyIU#2nG9a6{{FHnWY@Y|#anGwe0uig#rjRzWi$LH2CWAd0hm&NB*C^2e9i42 zU+|GG6v<8y;|CF~$O;xPZLtNxItkDlwkAp71s=G#7|zz)yF=YLCE$0z^~Jz5Cgwpv zVU$jR*VVh5wJhy=^CB6101zRZ7+dGdJrcZ`Ah3IK34<=BkdXU>@AKhCLq(vf=;f>Q z&1hcrj3`rC*CR}PGy~!@guW_}R$eB^1qDjN5R^#)12EJn2oM?=2a#hWL)0w+7NQy1 zl@ZM@0zl<=tv?h$Yc;Z!PGRYN(^09z40axKPKk7=HS+u^$|B2=#p-2yoAuolN;%OZ26L%#}e%g!p2@Oj0m)L z_3;>5S*xrZ$9lyakZWMy>?cps^W0Qo>i_8bs^(2`>h_$Iy^0%rxu-L%wVSpCb_y*URX!o*@^GeYe1MGfhF_Z& z*(pvg%~~1LeYAc^<}PdDKGE+R^PC7YwICo5V7Q&H~CIG6;-a(R6R=F&9VTn zIYVfzoI?R}q3W!{9!Zp3oEag*Py`tJu<%732ZatIcs8OH8Y9olIgS3r6t}6OG1f^6 zJ9?qKZLzsSnOoA{qnx(x$Ft6@$fuxZc6;R zfO{@+T-R`_DdRJNK(SCJdecD&5nv?>16om{{Ro=R-jxozzwwId7KggLUxDn`c->;t zx%n4r9F&egh?q zPb=M28khQDNqN94?>qMMLv3e9s7GbATz58Esq6oq_O^7FJU87gzLt}VgNeco7yF*> zRy&@xY6?!KT{X|woeITj@67+obWi(zE?sf6bs?(0RcAwzOfRJESQBtwt8cEJpQegO z+L?OBuRKRi99QpwQ|E`7*Peto`tw4!h#qY7#oTX84dyRPKi2%K_t-r*`z&&46lU_` z@U$j}b(|htTRlau@6V-nqe?b^oGE{8?I6_u!lm5RrLuLEUqs6$#%ulV-uqIy{Bu_) zef1F!TQrp~d8c#Hl@{MmI^DXyqvqTyYgbEyqaAjhtCzmJ`1`N3yJDsUtP44xb^e%9 z(1@C%{aYrqo?`GjfLZ-hP+*djo*1I_NsKT$%wG&u*YM?;Et8{ zbk;pM$R;!+&+N*JS?iiCPIy)_{I=er8I6YBDx7paQfFzQ^No4!L(~t?oT#Ex{-M^| z(X>~SvlA|B=Z%E=1y_NXG955(0;O#Zsvz`5A;6w|6A|r(Oxf4J88Xr_2hMIMc6sR!TxuT zTHo3;RPEst?c62JmS&V(-}+Md-{+H7X(+sGrgbhR!JxUB>X9EE+I4)sVSmWJHZLc9 zT~%awzF}(1S8o2P5zWV`DTcNA*rX=D+nsx%7o#@(IP-l-a^~`9Wpg(ik5Ez#x!P%* zr{#+Nf6v;lYOvzp@haPU=c?G4y?s?WdXifAty=&5xbU*xIK7F#rlr06{l459Yga_{ zteNd^VsWp7QgEfEcm3;wo4ACu@-zQeGhuHt!v}ljwr?@ct*S!y=iCM;|4y(cQvl4jHyJ-u3EcsH)e|!|wxMcYmRGjlsuf`2Qt+_o z(e}h}4}FdDxY7~&ks+FwecycjIa@hMy{}%D+6t`!MhQ1eR@_SXFje17Rq5Y)9kw*{ z(Cn66&vU?&mN7<0%=c(nogHJU^>+u?{(Py{rLNY-Fg1Q zH)nZlI~^N;NGuDPlX0{>IKm)@H__yT-kxbcm2RCGwa5%H&N2UAPkS|ak^XR`!hc!* zC40*J-+4I~XuR#*x^=IRF(db`x$IHjai)=3hihGrADu9_?{7NwVx1_HA4wLEmT8>s 
zRy1hg1e=l9XU*-df4^n>rZ<-^%yFM@d}M%@Ubta|_k#TOqe9f({z9D69Bx(2QE+C?t_*SM^8qTpB#P*8I8Z=M@uL&sCcF{_%$yL+(c_ z?Q&btYy?N}WAfafowG95>g;`ZH*eRR{X-0Q*qhtJ>3#Rz*mI+gt7ES~ zebq_x_6ALO&am$H=cyg`TThVKxLhm1&6~86@3+*8|EeM-qx@dR?rwTv zjvAMfA3K=^&IxsFn)6TRM^C*6|9MqS@(Z^Vb?aFwb4&9F=_Ri;^E~ERX3Y()^|@T# z>t%`AUdQrN6SJLyl-zv_vX!ls^bUoNdHFlhD0mEgH%nWmU|g!c`NO?I4GuqEihyj| zlg(dyH>}lD&u%eSx-h0F?p~5udM-hokOl3omFh#-)30B zxe@=z)qB8W{kH$Zmr+I$g^Gk|S`AWCS&=5CC}c&lLZOt1j0mNqGBVn-xe}3)R7#Y6 zWmn276-wxNANt<^|MNWe>;Ao7zZ+fS^Ep4~c^vO`xD;MwpuN&K5Wd+J!+lYBwHQ9p zTz#wZd)sTZ)2~mF%)ViI7_ZVY%PNugb2evfxtTl3TqK;H6v$8e@yRgpb(DY6!eigs zaPC{Lo+exHuY@ZR-nD*-&e1Qg7LnCgMW}UW(Sf-<+ZWC1Ry>HA&QML1)s!UG{GklV zV_$Yb#>=OU9%C-#2q5XgR0q&$s6$o*t))-O05B&QK(9j%13nS+X#hyVQLMXp^K^TSv7kB)ovhvq1fT%s?wlW5pn=aRFxnAy`zG(cZlz z2yuf32faJNLFDO#WC`M08dgKG4M^mqE{4=L5a^;!_=fZch@fd$&)qr&BJM;?65lv1Wg`vZf<=dI|(eIv%e4O zjN5@P5#1Zc&BAdC4LoJZBJBa`_MuP@P;L=4Y=k&L{70oX&8dKk>-6ZC#q+&?8bBrY znwoHoVmSv56?oZ&NFJu<824}`0A%QQv_L!|0J8&q<}J{zP!P?a*(YGHj$-{2K#TPJ zmq-plUZ^xoMNsll%L+{s2_5qQ`+xiX9X4f*SJEKuE&7qbm>Aa3hK2tM^_+S8&)YtD z9mU=6&=lM}S5p3K`c~a7k!O>W*M9-uB%~@SjX=Y|!a{N2TVc&UW4{I=prYhwjNUCp zNbyT+m>O`+fV_V|clxZnoXm67hrqvp=5O28fTK`dMg;LQM-kcPGe4=N2c0=`#XR~H4?7xH5oV@2^*pj-aK4XC*Whw!!>8goma2fhg?pKFlqK}U~EN7yjU3Y!CIBid2A zt$^4mDT8*KUzN*R@9&WX4up}iPx0sjCaxEFg^`E|TtQF|`c|k#2{fs(Gn6X?(HQ;f z0B~R^_9>wd6a1(p+jWl|iRk=*z`9{p0o@ls0az?$Bv4_NuT0yUf=Ma>zQNQ3`v(G|mMANf97w-| zzH<_ABhw9tj7wLqZib8oG;3k~9p8VKs&}WBU*4n0CZ(q~X8W$&uRORrv~85r{pjyc zrsPI&yc2#lC{UoiTlZAA@z^Qv!`nE8u${rOO)f$(F_XTdQUJ~fa-?H`qRZsuK7hdl zMj!#njf|dV>R$7{3-(s5ne+8=X4|g9V+iy#6Vbug8h&H7K7aW#|E4oWr}Y}vb%vUR zTMm}T<8g->9$_K+(1S|LB!0YlWfX5lITt)UiSS_%afzG=$iP5!-Uf$99W`W_6uE;9 zWE;XCkQs`sj7!(9y}(_&%;e(av;fCi`@&A?Ei@&zudmZ|U{x#1IC^%P>Pdqh9)V4yKqdJ4 zLA5}B0$t20csXAVlUy7;3xqvLg90N7zz?W8DAkaU7?lcH44G&_FVdK`L)e$Gl5nh; zYHdp~Tn0u2GAm zt^o9p1~h>U;~9u7WW6Thl){yPeq0MGaX?C-F8ScnA-Nvdf!E*{1F)8H0uV+8_sEjH z&JdB(`iG_wLmsU_Gm+|&jbp0mo820d$AU)Xr!7j!cOlcF%2S4!wP5)1QWv|BF12cJ4$#6}D3d9;>UXpA1zh%?70p4WHHWId;gx zh46};DPTkdW}`QLW^f3l!7hMtTWcLr5AveS&kFRwYp)*u23_ zeTT+~G{of}wXWDeGENs$in(>`RtW2Ny@W)09*jeHRM_E)Jp&~oz$svps1lYcDC~Xt z8hQ)3?NA&b-(fA-E&N3b%3+KlS+a~Cgo_%FF8;cM^6SORmyoV<_?Ng8rbtf~exZrj=->mIC_44~`)}X5 z%NfrqOQJ{ ze=bdl^VqqN?!?W@3p4Ai$v#xlZp;1N&G*iHO}qSL9oM&iS|`prWSn5<9lT$OsR)<{ zgJ>R)!Waq_Ls1X8tIv;uPj_{b&z?AX+v3O;&xpVQi+_hIyric-c(tn7JlehmBCk*O z8Sk%N@cP#Ykoi*HL%|$TaeUW5q*Pq~*9k-JFPk@C?{Hd(*h0Yf%PlLjw!u`dlgy*b z^=E6A;_9bh3o$|^?|Ty_cc8jhcs~AL&3z_8W5U_?d1Z|wk6^mCoq6TYZh1}PhEIKu ze%y@Tk-R0n;zh}s&v%1G6T3wzGphq+-c8Va|ISxRNx=Wv_;>vL-)Ar0_&(`B$x?bK z_ZzjN@II)idISSAh#cpdk$H4ve!j;!hs%wtuWchUD7IV)UohnZd0qU3MuYOh0}Zh} ztV~4pVJ#rj11v5X+weRgFtR=JRrvodN6OnEl0xD7d%NbOhdy5&^ZMqUL*PDxWb}lh zFI#k5yt#MtV5z32s8b!XA{WZa(l{>gYS`e&R|UgT7bb&Yf5AgR9ynrnN!UPQ83b-Y znuLXq4J^9FMBOqb;~vElFYHas#S+< z4EFC0EU|l<(2!i$Q*9^O5CDW6LJd&Q@D)dA*#iC%jO3VNXq#>JN6AhQ`3H#Z5FFMBe3z4p%{8{^_59f7u)ofT6S$)Bx_Wm>XM7c_wi?~?Z zFp?DtQA{-_9WZqFYapS?lA{g-JK0@vS<|yh{}2+%yi7YtIKSI$FrSSh=}W^yl`J`P ztAL_;a1&z6%qeDeb{QfXFaQ8%qbi7lX}B>?=%+7V@`66WAl{+q3-DftdF&Sa_YD5x zdTSckDDqYHZEc(ZOQ_9C?)`n8{TU7<2&xx&3E>VTM0{|u!^Vjz$>a~gLkZ-a_GuiZ zQqU7KFh2D>%Krmj|L;|>fAg{6$|WfXF1N0}$eiQaB80QK;n#y*_w3q=3cs)2uwf$7 z8^?Rp2+|ZG@G$*q`|kq@m@0xNFZ%EWi@K=@1it~nw^hvnr1JK{O_Usdq8yNQ~6!82hLR&UD-=fvy7;y zJ(r`6YBygyy^uoBf$3K+O31uv!9|(~5UdRaU;cZ>>& zbu`yCCnn%_NV5E-oN#@LVOsaNvb2gyM9%qbnp*xV=7i1L;CA`t{Vl3F5juD3R^JD0 z>p=$|LZ4tIq10#wlyy{+qAo+Z1da&WFR~Uuft&z!Xvmbu2#SP0L?6R_kmQ5YId-GZ z{OqP}J9hBHYJrm+$@-db0mH|qizKC7hahTfKCAva8~kqT+#f31RMqVOBZ5(0)Tn00 zn7G_W)7cjx(AHCp@{5R22@mIweR3fnu25Vz;zmMLx+08JVau$+F2DEcO@%ND^0ql^ 
z-DJSv(_kS4vZKiZ9Yhm?DdP_+Bp8h7^v#k3L0<}?2F*&xO2zTJ63#@FgRj6rVcH{& z(}jM724j;E3p^BRYwS=fkY+`yGTqt#!^-6wTE6WMt+Wk(Y+$OpU-Qr|Lqo7c-Am&jdJ{us*S6J7M@IVQ2qr$?Q#(@?AX?~fe565-srDj;S+~bZXmx`fubeRr0IY% zM$TJQM$| z?vl2TficsYVwX0iFG@Jx-=Ego#ei5IBaTjv^y?m3ne@&c^^BD2O7;<*d~3>JLY5fK z)k01IfDIH`Fz1nmsKzpJ`gX)a-hDaIw-NJYQIz7g zqZmtu|Dmz5kxVW4a?i+*45}2aRI(a1WQ+aBsig%5t_(1bNDz;%%YD5xr#C4x&Sl^3 zvD{}ZucLN|v$DM_oWJviLZO7I3&VD`J(U9{(03sQgDkBu|8llNb7(QtQ-|K2W`{9w z=YD@q{(IzWQCISk9~*srY)!Jh6mo6$t&UMT>?YEe(7kV<>Ic8Up#B=i*v_RQmuC$P z7(Qk9v)Hur>z(2=A%4ExR*BIvn>D%5D$U%bVq$yRr+vWbN?c1*k=}-{4G!jr4`201 zEt}`m@C(oQ6C!{{0wuY-Uu`4P5?J8cx;iQNc1e$hD+Ah^EvlzaVzG>#pCmVYpqh|V zQ;6q0wc`S4OC(@C2*shlwl*j-I5_yZnA_Twjn@oZckp>s59iE^e=sBOXO5DoYm-cH z5{OgrT3*f73>r>`mnwzg2e=Iv<2fR_u!<>p6;Y0q$Qy)!DnJk`pf<rbu*1QLLCIS-A23L3Nqh0CEy!A$0A3UDUBD>fQIj5ndgGI3S- zViuUc2w-KX3C`s1dmufbHutTUh1O|FX=!P-RvL6fMt78Qwk8cwX#%d3K+x8&*GEqK zqbbq2wTq;WB)EepTq{aomUydK~ z+0ANM*Q(^9(rcaCqAJg*!ye#ll6COK9vFwJ#74J{XG5ik|0INqU2LP#dVBX`{G;%q zOG|5CW4tY|n7q9FRfL~4!^`o6r=4cQxZ~}? z=k(G&*B|q}8~C35aP2!M-x&>;q(0uBddxlWkbZCycWAj=Rrx_7&ux+VCMLZNm5DLi z7E##cM$E9qBO?6p^}&^iw{K4s30=6PBAk}q&UtS>{;05caHm7zrgU>$T3UYQJ)X9| zdDBcD_N(iCS&?4qfpjbK9#N+4#Npl6(kR`6FvpuCLhU>E=|8MCl5q=cs_AC}Rv=uX zr>Ezj`EGOb2;@kS2>Q&r3<#jK2i)_1jFb+&yb#_da7y)Ry}BH?-@ESERL|PR+OwUV z$(y%sKU2OSDdX-O9cwYKk)B(>crO+=31S6ABqt`a6*n1t1Qv(!v52VRqq8YJojh0& z3*f906%~a@gu$R^?dZ`}P)kta0X-LxgFL0g0*K5f^N{er&!cFA*Y2mflaoCC!^@Y? zqIAz;GMjZWAZ&Hc>zJyot4nW+E)WriXC%2d@7xLRAUSC>fsiA#a0a{|OtYRiT*D>L zRo}n3aAtb5+bxNS=4dPAIAcernXU^KOnBNDil4 zwGSS$=``GJ`L0>`xG)~(ZE+ol<`?_J9HFv3O$Kpt$4 zVL&aR92N8`oKG`NfMtEd>5`nATPN~~3%XJgP9S-JQMH5D2jT6M4M7HL;O~R!KI7A9 z?yA?%&mEPz8hEo0@_**t`A(zlbHINJGtp>$o*wXN~<+fYhd7qf$3!3A{3xdxKSGq8%r7F$l*A85|Qfz zQ{S(zK7Xe%C;|_=lVzrH#>o+9fW=(or@4OpI*q7=!Jfy+%w3ouK+}``xp}~L(ce_R ze}5h@h)9F!Q9eju0s&gIBj{BN@I6_}Ox<2A{56#B69#z*Fa>&AL_()vdsL&;##0F5 zf=!MKYT7_-tuWNi#f9WAuh7MJ=lqo}!jkCMmoeR{JN9<1EIvPcsKD>xA`n2^vj)S_ zA*{S`=~A-YpT`4_8a>-3l~m<(RORg*c1Ukb=K^~F-np&0Ib?Ly3mRzFOG9k~Z4^>8 zjs^#52ex{wa&%l*VJsyB1d6lH_7Z=3%CM2TlnWbKpl-XHno5ylfMexErjZL5@qiq# zWmmvGNX0{k%9G!Gcuvv^hxe1j5>1!yi-V;QP=$h5=tO#N8Gv)AejrhK5WYjY?F+zy zY>X6HgbzWd0jjDI^UYZ9!R!@Hx2ntb=fzK7`a(^Fv3ZOAeQ_Q?U&!i_X3-AtcJub_ zdxWx5F@wJ5;m#w5MtizGUz{nkcv1V9fizMTTbv6(O3<7?Wo2c7O@dBNPJlRRC=tMG zKk`Sv@tIf!pasVTES~{50m1hOVG+z(?Kp>o_fL?Y_`I^R45>lOF}fC2Drwz+K{;mSE<`T>ISo7M@FyGEF;<3vCh%3k!o_ z9%3<9@pagF4Sc68xNz*bXutZg{p^y**X71ba?=q8Pd+NMuEF8;`JTF(r_{FDuIjY1 z#{8pub_FP;aOc}|8xCk;SF*RauZEEj@i)b*mlXn+r*(y?ka^!qr>#IzG+cB75W_(_ zsJnajEG%H8unz)zvjhP|Xt%KQ_KANc-$B%H2Qc3|bF$vuW@xC!8-O)Kn`7L;} zGK?iZ55(Q1_=?O6538H-5MpPbK#ZOq`+2Z|0pXbe8!wbb8-9P7MfpgWdo|-_&`vDi zoBEP|S$Eu7JJ#-qmKu>SDO#j+Vc(q(x3db2+;*>fPqVV;u7k;Zj50 zzRr8Yf(4~1g8MZDa6hp+_`-C4dtOHQ23oEeR2eB7efdH^`a45q? 
z3X@ICV}~M<2rUFi2H=RW#wNw=A|t%9oCvl-G@R2Zv7JgCTnLnB9VCQ<)lFdTTpo;& zLIFoH+;H0dn79*Wnq@3!=j^O=>{w3Hm$^8P^N}5U7Raa2Y!Q)(v)OFM$J{Q(s_k|2 z54E|mWWDvfJ$X#?y(R7m%o$bTDq*29!_ghRlL~RDRK2j=`(hmKQwQ7yDkj;N&4TGtEC2_}8TK zc?v&s>RrE7oX7TJnsrydL(V$6M+Po`1p1tBnhoUEU-a^1JzwwhVqrAJC?;;eLeSQB z`=A+_E?sSwro*Z`(^qY|ZXSlmQbU;NJQerlqHG$wYl4DpR|reh+2^%tlG50+{=4sc z=VVP5FN=*SPvKjo^{&H8yeUT9=0{7GFyXJoLdTt)2Xh!W#lb9cKg zpV4q4XRX_Idf#E0YacU&)0CSdV=Bx7mxKUMWlpQWGHbdtzi^?7Sjal;#z{-Qhg`%T z3r#|S=ma(eiI}*;DMm{>IkL@&J-eG#`=%=ST~}=N>a~$b8+HTNb7;v zXZ@>7=GP2VpJ@_{lt~^RmoLk?dV;l_?1$N3`Q&)PX)fjO3Xcn-iB@11zw86}HNX1ak; z4UR8l?{?w243Ol8Ha+jtb%zX)Ifh0?))3~Cg$ap;j{Upt8NvVn+5`ECO9{(|`jJM3 z0I1wZ>Q?VxSW0()xOjBT@#$BQWIwU;prooC_4n@&LD>>8T8$dsph1Ay{XL-f#RcCV{)^ZIWR^Ncbg!K`v(vz> zZ_aQZD`<1!eEHN=eI9nVqgldVaZcfianM*jzyeg7#>IsJLu13{X&PJiWo9V;^t&cv4I$Dq=WKi&R${P)qjyLyrWCr(S;7@1q8!OG0uE56vK z%dvX?ngj`^Q`?!y$X$3MEoy^1KG&@>uNkiYRzObAg9nT8B_e#rq>Dy-WZ~(PTCqY9 zO8_kKSQ>d)4kjs@T!VVz0)6O9X}jPHF-9TuCW8-qF2h-B{hlYk|DncewJ80-2dSp8%r*bNC5irq(dFX@@xPTznm++u2 zPqcKkir2~{@mD*Zd#7--IKJ#~bDJ4?Y5m5v%Td0d5IwBoISg?Lcp`pSsw|MaLGwrV z@83@*H>zqGzJY;LX#@z-;K@gfAGs|=ZRLsxt7L5dcwq54HUYSYq=g6H z7LPpbgk^97+D@ybJbt_+d3PyR5tUsz=^`MBd5=*6^oA2yg)|Kty&2z-6gH-$w33FB zc3-|lvT^>^r&WbDY~y^hr9Xs^46Ed}6n1Uza@oyM-Mqe@U4Q$vl^>Z4w_Y&?GxqUu zqMt9Se}L@xHk>_qQ^MS}Sw)NXN6LWaW&QL;KZG+9o-gJ_H@(Q}+FDMW;$(h9@&bVx zAeNDbj4OvZip^1x+q@IvFW9i5sAaY<4pLMYQma?j zcZIf9HysuQxrd|HUbYhbe^T~bswf;lBNxu|<^I?Ga~4~TGrq1@+$9(dzRc3Kz z_^Q}7RUV!8!?RebKDVTNB9tn(^Ub;29GkI_yL4^h9)YtSM_n-AlNqDM8odtP5JMf0 zic@6F#_3g31Y7Op&4 ztzg{sW^Hy{=0L&L07oB6%}|tt#Yy`^>>Z9r^$i6r3Z{RH-WYz?RgF`q-7bks@qJH_ zm`neX*$=+Ba9yEOcx;(IM`r7l@I2w?3H^N*OPsF0y`)4Z_+5vLHy8avyVX7(t}--z zJFidBX{s$=VZryn_K!!UX5r7@xXr)`<45|1pT=`TLLw3-Ud@eQj>C?DNwhs4y&#sp z!Q7pcnYqHyjGX~vGT+uKa=@0EsdUB?%y?ti#L)08^`9vErTQi4dF73Km9=rC*QmoZ zS)TPp|5XCIJC~fBLQzq5chlB}nH7vA4gWlhz@+zif-qCBJ5;zNMB{5{GAJA?Ab=Uh za!mJi9(~2wg9qN7Mgmkg&RtHOlwz^Q#r1rlwJtZ;L)9%vcz9%&t!dVRT-2ka~pni-0{rQI5NUoHspvc?1b3rxv3t)xF@LC&oca2mQwy2c=vOou1gfIPm@bOYVwwU% z-6$Fznm|Js05aWB8?vDo-*vCLu`w8}<76KJ5uGz;3Sj+Ppjt+$3C#*klc!V_9Q~CF z1=F#oeq8&*i;k-3k1Hht^IfaX{s?9kUZ`)`9C`MBkLc5<&l2uEn&R$0RIRI%K3aZltaq|4QJ+eCB)K~QPU0Dx z#BwQ~5wReoNFdE0f$#{8ng>4qDW;@QI}LqLCuB1^*pvVG%W_jpIpK!HFH$rOHsxv9 zwIHD5M?a+t;sG|%!G}-A`7qY-8770nnjB2M5{jr&(1dh6lhG1&7ka~|Sc+l5XK*!O z-sB^#Aqp;@J{ZXn$T3<6Zb~RfJ0|S}o$oP*TR$qaZT6q;Th-Jt=i}@7*h*fzlv% zI?*MS!U%c}38trTM&K@^vp2OYsQnH81Pjuo$tKzoT1bpAEieYF%|5; z0*fYq;pn9axqgx~dWfzz1c)tZ;!rQ(B?EMl0c4Rg5i*-h99saq7T~W@enRK;3$2zb z5WZv_RoG|YK&`8*Yh*f!p&lh51eGxiCU#Q~hd(2*7x{;D(qR+pg5_8T z4Ido*egIVH7$Rj5P!P@W8d_z)KD$Ua$8+{u1ezQDu;W^PcAB5>X>ZGz&j01SvhD#`~+Yot_z)X(?ylI@hoNs=tpg# z(;690)Lg@@TS|{73JN4}B;C4&CISLSP9z}Wk;Dr*HSptnhTR5#T`ltY2JSQHp`?5F zzQp#gTDdX0>R!So zA%j8%SAzPJf^E9+XsE!O9{_+L7Gq#aCi1Ax?HXJt;qeII?WY?4rp*swhs}`kUvaRV z@w4hp#Z?38ix=b5j-SMCwR^WephogY)7{6lcVb(Ik_s`wo6O9_TaS0~sr{_hLU~Tj z$LaB2b8F}mfu{tAgdh-x8K-?X)o3HU>d0|F0~~Zdt!otfP}T)QG%1Bz0#c5$8xubl z({TvaHI}(H{_;}2nVcHM7!7>^NCNag!Z_YB|Ke=9RSAJgVn-ee;-C2g;{$wwybFD( z5eaR@y88w6#vhdj;wq9bZ~HUrp(4S@=S7uyAx=WW?Jvm%y`t&)nFuTJC$AL&lNBsC?V_p+PZZl2m>3iEN{sIw_-UM(%nL2U#QB-q;f zHvfdK2^yeK;2?wv(#xUBpNjhnVIn;vYmR@Dq#?-+b(EASbk4!ci51OQ^JfZTN~V!h z3je?b)C`UOLG?+gZ$RfV31z&xqHOQF7MS}yoVIS>jND|80>KfKNqBxbuRElb;A2y3 z*~FAU{unh;6oN|oGETQMK(8^lQ_yFuOCl3R6mVt4{VqUG2*lLXN6-(EYlTcf9?$o# zgfKaMjloxe&mXyAvw(9`=e23Jnb7?R#3u%q#jlW(?-I4U#3Mesy})~ac2f3kA}J#t{r;qP@4w4 zILPaMl&+Ev!0+2TZxnf;La1a5W^7};a&{J3tWq=3>UAkrj&w`o*2x_h}9EuEpQKfJYZ zdc!_l(~A#wf0z=pBxH@55m)w$^S^%O9kuOou|d-dYJ6vbI~e_^2^#+F^{s%SQG*kZ za_=4w_IJoDf8nHu4s|((W?Y!3OCL&cO?=rQMFIb!4bvJ#oo@X&$pvZf5@;5J_=P)) 
z6E*u<3nXyrggnH=*AW_LIB%WDbjSm!enFv>}=QWcpRFgzys-EWv=)f z)ho@+a-%N3F{8xp@Zs`^0?#g%__`Gyuk5rdP1Pz~C2+hijfceBqZi^`j60c1e@#PO zR@qJ`!$WAfAFhGA&?XW^6&1}#e|>e0>jK~lBvGe$H;SCcCWVwDiX)=3bgF_X|*fh5#o-)wA*3QIQBu9zmgf?;p*=BZ6JJ)R&C| zo0nKp#0}ii=QjL&mG=_X^E4-3y-}@5L2wj0vvSocJSEO1=1>;n4uODr<8Cd(Q&Cx2 zBd@}Rqphf8I&w_(4Gqsv40#Jranca@2BmNpGzaKOvFU2z5hjs6x1TRkcP;Af@5w{v zz&8R`aRX?mCRTced+%Ny0yVcWRK`6gzsuLPB#NQ%uKLTS@Dd&bFyGdg{)d@?A z*Qn$%XMxqkgPP$(0@1SKgFWjZNdVzCgP?OiNlBcnxG8tj(sY33CDi0?nC>U3E;08U z61mpAtK|-DV%IVeTOf`8&pyh1vjF)UEV286V8G)`Whk8M0VE$syeHvXq`Rlm08KDK z=7TpdiX0QOiV@%Jk6pV9`6JJu>spS44C2q|E|B096E7(t2)p7l;@W}bLrc#F0FUZ0 z1`Yipc|R4w*d$R`!sSc-v=$gOLd2~RE=h{JwEUiBh+ddYy#k-gdw_wR6%o<20MbwK zRQg4*D!&_5Ggs@o7JU%WabybBB$Ct#+W;M$I>Yqd^qxUuR?Y^!2loYq%CTAk#l}hNHHd()DJ? zx$ZB+IG+1US21V>;QEzdxe%Fr{kkZHhZ0$d>e)BD8>ZUv zZ|z50pFjuK{rD@sa|Xhq2tP&4g9e`OnkPPg^(8d1etMcexE;(`mE0LCns!UNe#h~w z__4_ZERcwhuoL)Zjg4`BT{fy+!709`d3-F#&u{kW&inm?U8SBwjLDU?3i_%!s>hZD z@{5c1+(v^hxUVT%{?AdNbr#Z{XJXx-HjKLmU)zQ)BY4S@_x0L;E}BpEd?39wg z%ZKT36mEE%?JuRb1foz>wiZgFE2ztwvz!5vl)7(NdOi=Y{O*Rrjcx>B@`o|9CZJ%T zZGwvof+wPB0XK6oP_>~G$vS!tmGN}c64;23{h0t<+lWY7g!lA&qvWE_G5ybnaBx2{ zudzX0kanPZW?JAeEw(ODtaV+k1F>DEACs-=aA#SRPIS;o|T z2(gbUA=c2uiTI4ZyYUSo3KFkS?iWZnn~~q}?^rH<5vcdm?OK;&eE^YU?QXis5cJ@ zNAa|wq~C;CehQ|?CPPLJ;8hG?q}@rQ5y0$mimpQ^UJX|dd>4c&89#IuCt6EMS(&E4 zQKF1{GJ4QyL3$f55 zj*H!H8DHGrKVD_cyN@>!Tq3xg^y=|{vI}=&Z0j9Wh704 z-VDMz4%EuO&>2IMJp~;X7y|1OzbU9}OEBgGgNK@OaL}+ngW4i5w{c|#e8TeXxrFhWEL=Ym2Epn zKEfz)eCwN;g(1ppCWt27kQdZ&&cs7QhgX zBZ7)Xm;+#P@v1*RFIpb?1Jc4fRx@UoXVeryoi{&{@ zTa@~G6V29QI1@$$Knvv9iU3#bqXfrR=X>CvOPm^?V2sAn9wGz*Nb4ppBhp43J^Cm+XLxo;3~HjgggX2%{h zi2q+Ry+I2E7*rfYWlpLEJ}UZ@;OwOlwbD{jY`BFqm}@zrU`U;t*AXc8jny_GLg)f2 ziA*$J5BBHsQbZ?-RPY>`LPbu62tFHWrEupGm`o9b25M<(anNan9*$0SQo^Bwgp3_W zQV2<^&^}HsTI_AShzYI$o>Ps2o$?Uv?CiLQBCya6jA}Zn+IpAZlu}?5akiH z%`+|+HGvy@&my zWY9oZb(996R+-8`71E9qW6s9YBuc~v8;pfQm?BZeaK5k=*Q3MZ#zK|^Uj{S*5_LRn z0D?e@lNd;n+s@IElg9kPBgxWfO0675`3K^e4K)|6CFsjRCelB~iaLu53(Pn$>$Y7AGg9iDK{}M;S zaV%qfiV2f7q7WuTsFnb14?*WZDjXEDgpz`{#^gF6x41NmcE+EF@{pTY8{phEY5kZK zM$>%g&5$-nW0o*Q;=l};xD@mX=?oM<3>`j7@c{wL7iQj$5X*|W>y$wwac1m?PLZO!<+qbdYmH%DC z;kCO4PHi}HyufL*L(X{9MgHw6bygpOe=K+WY{5ErdP^K2+&oAER(KAW(p&jST7ZQrwszuC(s!kD{isRuQG|{7)d~T1SP2v>rgO|KMgxFsoGHFA=51k zXKp*1y9$8D5kza#GzYZZnNF>1prXlJ>rCV2u#cZYElUtTj04}`-L-mtK`QZ2 z3AWju{iQZVj^&GY-j8$qoM!F!N_C-1R7>F<6=en?tlit=)gGxUJmB^7Md*O~tzCek z*FL>b_LqtTrRyHakG5c2#BMZ|)B`0J>NIZwf*n%#A=I4e5R?XJjA)aD~Vu9e(&6pPklTA=jcgDZ%1MjoR_#?*f^dM zB90a|3CIsg5g-si-9S~LH82kV}IMO#DATS5W@y^t>0=j!_U zz-bD33;@jH0Ebr~i@`*KDl8n}NHyRg)d>nb(rn@ourBsx+q`+RFJ2HI7^MKsipPH% zn^WrSoo>k${U4p1lD5J0p$Iwb&yA?kv14vGu(a$@h~;}E!bpmVx%qAC;>y#B#~RHJ zSCh_d?|*uFWkzZl^z=idr#Hg@jk<822g!E|d}+8Lm@ zY-L}6et_Ng3@v&8seBY@x?$p%H9ZX&V>wtk5%xc!n|;jui)dV`;-4s^4wFw5p@2J_yZAzv!2Y9WHXw3Dvl5Fa*(~xz79fRA0DB z)rM+!Q}8Y-%x_O;?QF+w;dalazG$XTJjdq&?^CTu)^IshRPA436Lb#xRywy5optp2 zKq=vK^1830KVD$e(GY#=v*VM~FJj_fsm^@mE2dCBfFfR7X#R&RyT(sCJN-PCuWL6R zu`=VY^xsUsVOt!{;a%uad6K-j-Huigp(OuF0h1yCPpYYAk$%$@9w*SQ+Pp=hw1C^#gC4sKKYT{ zd6+RSoD(<2+GnawuSe0u;o`-7d|_)`zAfL+m-_tm3!OJ(YE9E(*FZZy5s;U+hK6f1 zpo>_6w&Xn6OH_Q|;eS<{Pjnt*fv%&`Q+eo=*5@};d1bKpqk^Yb2|GqN^zHfiqUiMU`m!p~ zLs}|{5__N_;P_+d;)M%45J6u-`eRb!gD*q*q2cbXjNJ~S%ld1ctJ1r3EqoytlN=(_ISeV}ch0FCqP@HQ6l7O%*k<#z4K$vs-^<}O z^s4N+V3}azyQRIwic&BM)G*y~O1nIceY`()!=Yo%s&9B zf0npLj-H(otBp$oH&%vl>`i)aV4su9$=G#w$3*4p7bQ{AyS*)aI8`r*ucb*(zW(9?+6sL5cvna!w=OcOH{jqo3Ns<)8YN@$2xdQegS z%o+=$l_N&g!=B5p{IQ+M^bsYMP?GOVHPiIV4~+tk6~sO9^GGO5A7ZRLNXXR1jVDBF zr`%#?9y^N#7_92YuhK4>rm+qWBz-7@AmO`b8Z#8d2S-X=9+ZFLR-qp@d|DI< 
zEj+9jSvq{2zKelH-m9QQ=h#*Fx%do`po~dyYqgPObA#R*_R>oU;;)YIPOqQ*lIgo6 z?6_8q{Kvou$SH7KSTwB{;Zf1%~D2Zk#eh5kG=mzVy?{nYU9<(GGB zY~>%x@>`#j`cpFR`i03GlRy8_{yvLCI^~HQ;GZ#d_e*BcE0{&Rzd6S;hHo5~CfmP5 zvEJMD=D#nHB~in#UG=j7PP@+Di4V3bd00o}|GjBsP_K7K^y9fzx1`#gf?;Z&+I$noF|zbPnRfv>ZH>Bvs2OZFH;;&@sztsH{jg$oxFS-uzZC4Go{39 zY$X@bg7Ud|@JiWU-KFAR1h_8IpRg~|cW;b$PWd%dq98svESa(T-&cd++^ze^iDa#M zyFb~|^!By6lY1?z8e5>5w?BFxNZH<;-gTbeVDCK0<6;4v-W2`VB1|TG z$Az2xg9`)cjm9c(bThn0H_V^bogm+^?>4Rcq%>XMQ~W;{N=%h|d~#-v|6G@{TaEvS zXaAJ9of)sfNSEVLJji>Y{$qw(@Z_a+aa&v6ARFZQNYu%QE7EfEz??6W2M=5%fgF7Sl@tbtOI`-(qHu{0t)`Y26>tA0BF_*xGQ5k!7U}B#74PX%TnxjUDhbwJpC2@@I#o(HANFwFdU_p>q2N4>LNqSMMF&UM$fzC$?X50w71GjlEuh}^Hlty>is}^QV z59%;Z?(dUy`){v0Dtx=JFhALy<@*hnzc)rRLGd(6UA4s{Dnq!=TArr6l06u!OY1TC z@^>5Ubrx&i-N1|UcBb7v#8Y|CgnqH)Cb(2f1J%y7GZXhym{n^QL*-zIq(n@ju`xr_o(BEj^M0#$I z#_dm~u_|Qgcr$nUR3#-PpzCMzubsa0?u&qg#Eh_Ky~fLC7Z<*WJpK9xZb|Dz6)VVb zvSr}%WAv#WF8MCOsr-moc;nw`ks?Dr}w z#T|Fn?79qIp2zj#vY*=D`i2UBcyurBSsKYzl-4FvfEg9-#5LmVh)E zzEF&jix)6=;QrJ=;JygPpqY?>s<{>l?ccggtp-LAgsCOoy~_<$htwmuEFAUEaLF;Y zcN$Q#1IA%G(b3TuYQzb;I5Y0lFOU<-Xq51(9$uy+eNFxlS0UR?w)XYNS??-Cx5%5> zhwpn=#Z6#?&ozjj&-o~PxgnO{lznhHlMx#eOQuum010Vt^sk6V8+M;FbJR+3;^5r3_Q$+w}856 zI0SMhHQY2@?9MHCp=j0vh!?ZFG0qN=e(nReukQWs-xEErr#G0nF-iKK{S0DO>eG=1hmTtrXq14)ThwX zh~s+W!Xw#FxW~5OAP9aVBhRYZCp?>}Q9y%6LCfK9p((`G zAjBRydCB-W;#wFCHN<)v7kH$mN`Sh?R&yL)aJq{$`jMji2*w6jXW?G*uGzup4|kr$ zRHucOYvqc((HpsMs-IQ@Df{D=QG}yGEdlO@17OSZ7cWX^O;cbWX{DjS!Bu>suhO;@ z=plAX5VsFa{$lk_`e?!LeS^c7^!p%Q&O3>g01G0Zl7WVu*FeY>%`K%7K&8jQz+t4Q#62c)35`P_xjj&7!gw7kb_i`U>1WF5 zQFsAVy$l9or=CLWduDWW)MAR0`Kp8z9w+ul#c}VIH{Tn|y2X7z)`rJiND`9s5ZSb1 z3_Lr7FoOBTO)0{JB@<%_;?-&Pc^dT$L1y@YLN8023!KIBRUIWJ;Qx9gpY{YShWy zL_%MBZUOUKu2al`h6sQoKwuoS5l~|R76Ld(*HE$W=S+(Yh`ma>23ZaH1ihQ=&F+nn4fbb(^ z7{dgy(>^1&86-)x%Rr34Jdx}JZr>6xH_;(i6wE@%`GHaZ?w<{WcasVU>T2+U#mWt) zaE@1KAzG83G%qi&y9I4kzXrR&)ObSj%*qFWZB3L3Yz4%qI{`^Xc&akR_(h1Wj(Fz;XKgH-H#VRMF+wnYuxCbAqi+O z{j1)296AZDmj0u#RU&UExg{T8FBzLJg9N9PanT?Du)a-VjSHE@@_=v?Dsx{UZynp) z=m&RyE>I5P@)rRPiE)S4~L%+8RXT|Dk~ z&fSxJZce{q`oMklmL}^%<-Ic*FRU`JmURS$;ixb)T$XU;y68#SuwQ?ShJwTN-Q%T} zb~si~JbHHa+zC-_drp&v2k&0j;rX|^9qPO4+P1qXDO$&l9V-|DM+}!Gycd~u&pa1F zu7u_ccf&>1>qwkija8;`f}CQhy_+QZEZBO)G*6lSS$ZsHkUW=?i~>$A-e>TiP@sSenAzJ zQ5bc0Q2sSG%E~|jmWT8&92v~YcuyTXT&PbJw(i=t=n`Zyb>Awr9?jLgye;4I4NLOg z@dAJ@vOKNBT$*}(Oil)T1{>bJfA6?u#}3=-MGKM?^wZ}pUXO{{j(DX*?N+>Y z1t)iMsF?P>SsXvr$%w7F>8M-p2i10aB{9ZLY^c{^a|<2nGbWti`Q9Bhk6vc8Mhq79 z058tXpXF&9m@UvMjJ*?owiU`nKGW#%yz6h@TzUlG@r|g*2li%cSm8OeDlKrF)dIVV zdk?msTn&4|y95Io!B*LWxp(K~t?Kq4Nn|zkp6ZN9=eM(@t)ee$K=!kq2Q}Z?o&U6va@5mU4eK?eG z&ROe`^_ffWtEMcj^b55)l%Bm|@7f31t%m$>?tQ3_dHrdWxK#+m*FOXo<%-mh-SLx9 zM6K|?ZvMXziM)i8VlcwN-rj>PqrI;H~tv?cA7Ns)&bz8;x z@vqI}BUAspnehDBb}g;-uRaz$jtQ01U>S7Had0Uw-WmV?Ty}DkZ7d(lHpu1=US6V^|!(Z~kHVzBkcWU$*zA^qae|$ve&_-Lc!oOp2gFb(~ zhqn%76{R!Ovus!CMatkQ!nj#Sg!MaOlvWz+@rfT3bdhu!9~s+RBr-nwF-@Oe5&N=t zrS8M^5q``__bvX+0t2VqvMkeT59Rw#e^1HzauyztNO|{VL7jbW)xdgA#xlLgzP7e{ z1-(eqAvXTX?s?MJ+7%kdElOsUG%hk_l}}{*4jX0LEm4MM@0&TAPBZvDH?_%J2i~a!={wvecVGNt7VFl;CD+F_%_oM z=dw>$*xczIb1xCCIC5gRcS^y@3a1ae4XJU4Yxf;8JM#B2cLI+(LarTb92`=t?Tpqw z@)XJc*i+F`m%zHztB4C}u;!@p{*+_4*E|wE-1~YTd`S*mGjg%Um-jyJ+3%Ic`I$k)hpeAKQXW$Ct19)8X-O z=PlOe4bB`YZ(j7jyD>iU@#>U{`|Q5JLRwV zP}{4tR-cI*HG>mxF06aDWwa-+@I9Z4_+{~iwc#5I#Nzh4Y2M6U{b-#+yy_vbu+Z7O zD0_>SWCdBP;xCd5C<9>u;+Q3JkBTnolTHz`C7fOjG9nQ`eI~HI*zI1Vt2C+)zRQ z1qWnTkR`|_2m_-8bub`7K-NG45kev_LLQ(4juKD^ycrCOfT&0a1QTMwun3WH00G$& zc65*>fFePRN&b!Vo&Wxy_wPA3-RE{y-R`Qc>guk(op@YYX&+!B65~bW(fStOPq=|N z&I&w>L05IlNkUDW;R0K8c+d2oA5r6HMB}Fh2UOF=&)%l75lKp}vbCi|YwB;li$g51 
zVd5S};6OCtg7);j#}K{=JZ;xXv!JI$U>3~BzaD{da|F>p@<+B>V`6o2$Nv>wuR2VU z8~2CUOag1+p;8J(ZT@k2M?ZR$ymsvpI8qcWmFE-fAQjRy4Zf{59W%%2Lyrp5#ml2& z)iuukMdHws)7{1GW<|M(Q8su0r`+BhO*73=o$|W>CgGT>wH9yw5ohV*jCxQygZWmk zC_k$~Gvs{yhkN2KO&TRD>ZaLvTVONJx3c<;?Z!=x#5I@SQ41@o?<|xMo}O-4o8X7; zrHWMtdrrZZIZTcvpoimHmDFJ(E9JVYbR*7vMO5>~j@2wc4&&gO({ozGMfrK(oAFGC zh{Y$EOqT#84TpMckjhU93orx8OTk_F_O{W#B-;Zr-Sdb18R#WJkZtTPWf|Rf^MDPC zl#WPOM(C8+VDqRt%v~LSfIk++VETn8f4zKD1!(>i;&1GSw8f$wWNZ`t#zY;7u+i6>g z!hww?bXv7l7~5Fq^6qy|8-V!LH`2?4DTro95MWaTk8VBpSGqB}dap26|(wTU*x=$S~0>$?DK46-IYgLDgGS+jp^DrOwVYC@jQ&17^DS=YSw@p#$ zPT7ZODi~GGHsF7%G6zh~^0LI(;l{%<;Y1;NQ!f!O)wvcUeM@qIODG{yd!m#q?DX%d ztNJxEG8__L*6U^FGZP)gAIoGQL}L(p7%BUlWEjXY>E7gM=90~4k%Vlg0JJW?apfKXD56Od?#%z>yqeDaKJ{4 zHX2rxD_Tvj%GIk@$XGMdQ0xM>pG#`RQH;=$OtIr;oAsO|Wz}NY88_R2uL(G$?7+<^ zE_bPwai_UD>{q8l75dLUPWYaF)<28bjE1}TcPbt8@}^$4sjF^fr3N6gvLnf|RN4c3@F;;qAq-7409EMyzZ+7qn@R46P z(6^Hta92^Bi%XMADxzJOr3Z{jnzF6xP|>Xje0|B-FVU03(#hbj+*{#aGZu0ZgiiaO z@Z#5a@ge-cm3*-Jy1tKn4Bdd+Zi;saSW<9=0a5|ap(ty$$!Y+@aC;fmid_HB8}us2 zr)=A-zKuFEDb{}1JpP~Dlyc6%{QRbsL7+$TTJ6&$KO`P~+tB8t22V`pMdir`P!lP(enJIiP4 z=v;ssZV(Dv{bGd+I7hYBOP)y&hizgY8uCX+3Gr9E)iaao$F8IB^jtrW(&`04&DJ>+ zEU+5A2cd|DReo5-?k-G~ExSXZu;_Y0fy2Y0{HhV5KK5nPxIZhC&6Sf8fY@z`Z59_F zTjYpsiy94W7u)<_FoGC0y<2!aM$?_`NWdZL?1}-%WVZg&cT@>q*0S`QHKHrMTK|eM zWaOtdg0Uc*Ju{W>RJopDleVKlb@;rnj5o-Q*XtpL@J9?8qn+8~w<2CT;0EEc*cQpz zjBQ64Pod1z0pAOHEilJ6UxxkQuK$bhGlk<(Pa)?vY@4ADD_$bEL7S8O^6#Q%PM>Dj ztPOvS8^89y0)2$$EdDTj9y%>Eo2cx*6hh+8axyRiMdu9$Rx{{as`o9dd+cuB`q>oDCs_=4_>0IWJ)42DO8&K{QmQ1nwBG!gmL=dXMM~Ssq+ZO+ zqk{#Km+1Zx(ele9+Y_a2Yxs`ZD zW;Q@z(kp0T-&29T{yN&z%z_Y~{(Gpi|0|qucd( z5W~1HyD|*;&9MP6&Y8nIhI5j-YrDMiOM@L`<{+}PyuLM8JuJ?hHPw>Pt<7GzS;K98 z=a9T#ogB+Y&=oK;fr&W3}^(sr+K%HbLfBFIBY%-MUBSIKPqD zgLTOS%4q1J(tx=rX5|#mmz7x+9+^#coXunE@x}*ZjK!0Av?4X6(>(!d|F)$-Q`eS% z$ZA1t%{{vlCeOSxxoGA?0N07T3nt%nK48p1U0R@cy?(DD7Qrb}uomGaQ;7$9A0pIV z_di98xPV$Q7w(R+V+yMrSp@I%C25S&)ulA5O~s-31@g5dyx_x7E%1he^QZC@&|Iz; zAa12MBz^-yhs7XV0A|=VQ%BT7_$ebn>zLZ{!eF$tD_c$$IENa;zI%*eb{9SbQJMw| zdKXHrqne!#^cIHzM%17sU@n0=-jf^JDvRT|1!mc#z8>23okOzsT%CFH+24awdxvO( zPkwWm^?gs{;&_wjAXx+)x{J%17HKv#1qv&U>%Tj|m;7_XwWO87o;|L__tGZtoxXCT zqkq>{@=qbK6Hg3!4kMWcHJ3Xlrh56T`wmKN5+kMe*umZK8ZzQB{)=#So?k*ok0uj7 zCW0#_&Nhv7jKnkwh#toyWd0F{&Y(++*4hg~_o|53!@$IKCm-%DX(YCZXbE8^w&%VQ zbcV)xR5$GyE^6$q2v$VHYu;mjb|5Om#BpS&ov8QvOCF5j5VJp6;0NxP+zJcWeDM3v z7iJevn?9*21@4yHU3bD`6tesPZD1{5Wqza6lzaF zXmg2-RTPT(RR2UsKrF>3b)r~|HOg`6?T1FfidOZl6r;aW=RP%tMKkhZ_28~==d6W@ z0aQx{Fdvex2Q4K|jZ2-i=zynP!M`H1?;j8?e}ck9%L_+BrQ&`fYp51z>qISMRD&eI zuW51x@9_4dE)p1^oMf+ff_$>|;=8&n+$Cgs6BwFbf#D;VY5Y=&1qa z9l0`tGa&6+oEPJT040O<%QalPEH!Q}w4C7&pbU(*0j-+SnFVVc>-2zW$C=YaF2Yh(G zB3Y-}ZAAIbSvINp`*mhT?9`H0V_?3w<0A+J`Z?UsGvf4V5U{@(mKCJ-nC2~zlP7;hkzUBD5@Uvd6SXyiV z%Wpmpl!08%xq5m6>ABGO@bK7#s5q<+A>0ypa0q+S4;vPO4Tl0HKms~yan#Dx;;88n zs~;_nLXTNMj~y|#uz*@vWTr)F|1Sh|To^JU>HkOAn0R#$KrsC44Ok>PJOLYm#{35y zn?GODrTN?gz}YRs3BQ7jfSv??U=a}s;aHH3t%>f%yr1s^<>0@{eUMkeQGRX!<=6yd zTrB8_?NKWe-Q(q&t$zpe|2Hs8Yiko-XRES@0NCEYsQD&@$GgU%0bSI`*fe`-Zs`#Sz_F#o3sK1z`?@oIjf^Y8WlA5r&D)?Edl OK%OVP-KZym)BX!yWK!4w literal 0 HcmV?d00001 diff --git a/doc/sphinx-guides/source/container/img/netbeans-run.png b/doc/sphinx-guides/source/container/img/netbeans-run.png new file mode 100644 index 0000000000000000000000000000000000000000..00f8af23cc5a0292622ab9fb26d950f87becb3c6 GIT binary patch literal 124521 zcmafaV{~Ne7Hv9q#~s`3SRLCP+qP}nwr$&;q+@m5v2E+^bMLwD*Bj%F8kI_R?Ol6) z>sy#}u1fe%Suq4yY*-Kw5CjQvVFeHn&{g0`3=Ih!d0?;=0|9|&GZzy2X>Mo;0wNxs ztO}*3IEs;@nGnZs_N55b168;PJPt(=uA3xGQXDi0#gMRUc~%5si5{aazmgbMg`bwv 
zz*PRsTR=oW{TrOiz6|V9N3ECb&E!O`@6_AjM7!fSNS;dq3Ce!K3CJD{+^_onk30KO zDkp`n{9hnG;QJ^M*>!(K#UMh}!QcBbT-tyNuj)F+S-d;G^U+BMCv2y@xU)_FM=@NGC#<9edCLLbf0d@iZK)nyq9gb!iCS9FmmhgS&52xIo)0^j#v)bv9*C{^@zGgA4QbgZOO{P4>A zpkNzzg!Z2MHQI62-|Xm?(R7M5M?o(F5n4r`X>fmGEjo5S+Srvs$J!8UzT(<_I|>bd6`Zb#Gp5=}F$w-ieAraE z8eI6f9H0H0(j9yy*6Q|FTA5pUNgv(3#&;t2$Ku_#S7V5n&%^W!(d&=p(08kFx&cIk z!#c%fh%?MZkRvtb#js}mkzzMDSDyIu8Y0{J1~-Sim@R^jkeVu%#AjJn*=||gJwu2$ z^Ep!%JsAv{Z*WYFut(BQqFeJTf;GG19gT^Na3FJ!k#E=cqZ#_yw>qypy3l(=T({q!*g=!^#Oo;BP?m%2gQN$P z1Yz>C>H_W3>>xL!)`MJyiRDAhWSl}D4VV}+IRe{)+Tzv3en^}V@{%LQiIegq@Qh}n ziR{F|ijx)H7x5IC%qc6d9|tBhB!nk4D#R&7B>XH_E7vk_or5W^l$=iDO|apMF1r>LxxAjTl_swQqft- zxbjBnuuP?Fu~J9byIfSIQOU4EMu|p&MwPqFMZH=syUM0`O5w0u06v53uvKJL zv92Z7nfv+u)0C6N&>j)}wG<)O(m~*k^cUxH4J}^D$#a+DjUoyiz7Z zCTG6+SlWZ$8?QTg!`8wK+kl#J-D1_|Wo2TOw!dn*YU<3Nb0|wcN55yaYp82;;1}_L zL5WE8khW9<*&x%nL-juA_@m0Aii8SWHFlLpm2>sVqR1la;-hsLTMwI}RfSc}8j3Ym z3ug?XttHMt9EP}m@yV$NnsxHK;wVlJN}4q z_qubsrM>#G^_-d-s2a0mQ=3!2X!vZ{8tZ*#eK2{wak73a^eAz=vNL~dc`|-oedE1n zdNFdlw*44oK0aY`q2F`odh_j-;K6tL#%R(Y8@>-T08A2W6^s=O6_g#E7oG`j488*9 z|8}~4J{ey>qFAAj z!qA0VgqMdegkL6tCa91*$KbHqEVfCFzKjO!4WlVY8jQsx`Ct3*E=66+I?K>WbaVE5 z3^c)fBR`Rw$aeEeSS6h*3N#ODDr#Cc*X5AUqMqXC;_hbZ&K=*W+3(qUzE-*Qc;0wR zelx||z@lOHVCrU8HGVL8Hc2+Uhfxba2vA)iTFGqOJQUtPo4}fo8LJ*w9FrR7CjNtV z0If-ZBbUS0E@Z?&jqEIYoz-9>*~=l_MM5x|8@H*J2f??uM#`3;OR62r8;l-IP(m)t z!y#jsVW+X?)Oj3P%0~lB{bCKt8^~Q~Tyi|&&fICjXlk7uJt3RB&NSjR6~MXf79tD)DVcM&ks3;wB~SfD{n-`+f9xqTq=MBHESFl2~PtNT`+ zr0P*WQC`(`HjZ+U5ZN2^;k;fOD8-!8!r-XcV$NybZudNi-b3S3{YOo`>E^m3O!>a@ zxKc{BSDV>d3C?$X=PQ#SSxZXx{YTCkNs==CT6el<;Z0yZNgvJ~a1?L!5oxL)?D; z*kIFO@xA!_-OEQvRmjxv-f#>_zL-WvR|X=1^N)_(wB!1S-PMuQ?7S>BJA=CRGWV^b zBcsjo3B_1V^5(zXJQsKOi)o7w%=)HNxd>dIFSxf_x9*p>BlK=M91ijJ_1g(+#V>42 zErY$CA3kmC)!iQrulvI^_^KIP!!B8EC!Y6zn*A(IYa45IJCNNAcTEN+*IE^9oi`5F zFWM754;R)4I&&{4yXD$+U-;hzo`nWOm3@~#Y-MeV=D$nKr7JA0E$LjtV^}Q+f z>{mRQzcD4BuJFwAjJb!r*Kz0laDRlGjhyx-^R;+Wf1W$o&+mQvus9`H+I*LMv$(oX zVE3?f>2Y>nV?yG&y3*e0oW7LeJMvz>kH5bDTUsPH%`5jV_|kAudA?L)tG{{i^#B1e zg2N!@Oc5jyeeyTR*1ar9*(k`&)^ik-j?YITy0DK)GSO9T-CGnTjSa}3fy>7<-X~{A zI6v>T?Q6J`%3@F?83@7+usp+I_@(^Dj594NI$<}&Wt2x&9tOO(OkzIA52VLwp16t* zzuZ^fFXo2)Db8mC=(|h z9RVB&2%@xylA4o(zAK@fgRP0Vl`)}{yPYxNe~61(aCN$R0x1om$fvppL;n{dJgo4q>CQ&-_%Y1T2&$_17rlmUpI7lDkgm(7u|nRwH~G8JA7^vrG*XCE z;weZmN(wS`xMDiNBVL@V3topV-1pdy6QNH|MMA^&zCD|t=Zz~1q&IRSQTjckuc-u& zmm!|D1yo|uOV%B)tQ_d4i|#x}4m!n%h+cbfwVvMOL21kx+&Lew9&Yw!w@SPBc}a)< z2ag|II`0I#uh%_sdg)S1cuR=;n3(;CdCy(CxA8~6Bi^1AQ)`NsXp{(6)9mAC5lo+{w&z;!I4-G}EX_2BdU z_EQ&nAttMbfb53`zJ^0wf{o>ql&KHKc}H>S6|K{AxGoqCq`Je}f-dSiy_!l7Tifg4 z>@s1!fCB+HxGg>&*#@HPlT-nc%(ITL&DXo7VS%J*D4eg4oL>ejROH*w?MEDcizkU# zcnI#?0%N4Q1Hxg^`@mX?qXy%xIl{05K;2ngQ)LqQ` z>qBlSyE#%N5>7Bql|DdZFw3Il$=|!GO08H>3|obo}C&AEyX=JREH&fvq@G zG|vCH3(lx3wwU^yizhjszUg(mC~rjwFIpB`BcJq1~v16y+&6y#QWAK%X zfmvIKf)Kx=pdCT)k9G~Lit3|3?t$$Nl)_YiV0#OyEt8Dn;jl=E!LdL0QYtN-HW{g zljxaGA>Bi#)~>`w^Q)o31Yy5brZ=hkw+KxML6Q59H+H@`*YJU_@gIXlo61`kp}j%= z`<|VAhS#-`Tufp_IKqB>^FOK`_{^ShabHUqP1H8M?tLXqg5J?bpF_ocg}!(r?R@?E ziSf=S@&?ys@B`a`;jC@sm3W2HVDyGq1H~Ea3LQm$J83AP;_p6qI2uQ2UtGr^H0hq% zxiq+3{jI>ei}VLgv0C@{;Yw!R2JoK|VE4FDB_rVZ%KR*|o!{a@4L_8#c29lfxD+Jti6*801(h4MAxq_6B3- zeh&I?>ELl}g?9ne5$U~7aFp0^N*HCdGiiVRd88Zl1lr;~VS%R(o6haTc$46U8S*_LbDVVy@vPE&unK`) ztjy2}L4OuJ*lc6Gd?qBEP_`(VevR}XUeFF`6H+B47p^F>p+bUqL*cO-GnL3QP?1vP zETnsJj~F$w17oGDuw+k+*yz`dnys8DUC}=1JsAwHx{N$&DO!h}reYF1>rSe%Wb67#w zVj64}-p{vm!f>g|Jz7Hpo?vnFi>D0`M!kG%-6Mgic@g=57}^wO`uLR}gG!B7P*VMd zlrB{4&_4JDN77aM5@ZuK?%yP5V!&(*Sbqwg$j=z}%SdkHn@d?Dj+6vS81ENuMJ;Hy 
zh#SR;9xR6@t0osTV)U^06kc&XO_3*GN5@(FqZANn3{vDJ3qxeL@ec|jXg6`jv`o&t z^^F3D9jDlB2~Tx1(rff23QNP{d99>acz1>Sh>p-~72$woyB<-9S;} z6}J}{LE+?OHXACSiL7q!KTIxf9=vFG9FHp=2Q72N9WPdYERvZzmw$`uq-F)n@w8Qr zT4w$ykr^cNLNPZ-QN3xU66Ua6xBKKuqh$#{4vlYp(zK2!@-YdU_nVopCjRD)yQyv; z^3GqylP84?25W9za(3w-M4=?g=RFQ{tJmwC(P%Fi8Mb95`|!AQv6fFEyzy&~%ic>z zUZz7aq~2_{&_Qrd@ofheP%l`9PyPWEd92|(=?DWABXd3t%Qq(yi&G3v4HZwpLeKy_rf`QzQ*UpBu6+M(g`rKv$Uzl7kqv2`*sq>;hkv#o=CT^F z=aKOzj#+;A?T32V))m>sPrLj%BqP`foQKH*O5Q(QU}l0>goOKS`|f7Vfx|dRF!^E- z$(3-~{%ykF&2Z{N@{u4gaG3+sNu=+X8bd~>9Jf>5CKHg6;Gw7e<6v-S{P*e3P6iJs zJp;Ak2V`J+MDM0#3Ae@qo`1$)YPp7yYtCQ_op1H@xtByD+r_wxsi{Y$Nh7ILZB)~Y z(IO>bqJm_@IzK}*o@1RS$Cx;6swieIN13zUDu#3at@In;BcTpC*iOvLg?RvTXgN3OpsXUr zAF@2kf#9dCV>qOzDg=*}c3wh&3+Z9ZX8ex0nn(xIh=QM30E7l6o_j5#D6M#3!5D_uwq%On)Yf)R%F&mzGt}RRe*)3> z1h)v0(_kpXiBK2)xav~Alejmx2Ms55lgBsIJ5Y>CN~Q-Q7C>kzD^#{*5O%{@7fF-G z4=MJ8N#&GBx5$RrEq(~602%f8Y6g1LQZp4esZvdwMHwayVm~!1KY7_)ZF|-Tu8TB*oI7jaE;Jr8R|+GvUkA-*6H?)>y&DBEe=s zXnoIwQNLzE&!V(5J2gbsdAAXgZKMBf6E?l|FUbc{q)?R=~@ z^~Flo{S3-7&ZHMMj-;qGz62^$=V{Y4Tqr}2&xz6)c~tUBZVo2r6GCRA9%W3Vd!{;L znKu(T$mE0$AP|Av&dr?ps*%g<5Jhtsx1-d{N3bCi-KMhqk^=jV|75A9O~Tp60$#N zL$;y9h^aE*qNyV0Mg+zSJ6$1+dY~&lMZ>}hQds~Dp|pQoMW{kWoYYtxOb<(bo2jt5 zL7SwwNjB|XS>m`5A#Dq-4Z7Vco*Tn_6Bjf5Y=M!5t-nJfG{lK-R8??*px-%a_KU8p zc$SLb%1Dl6U@nnc4W8cMsiYXTVCGd$60^|)7bs`(<02{z-3a?F^QPGOu-i+}sspGV zu5_VB@FFs2oMhQvy&hVIJ|e%@Dk%>WC_hSR+m|WCEf><`EO=X_u@iwIB2jU+b|Zy~ z7=aJHT37)pFm3oggFD%0>4Y6zo49&NH_@G$FVy^6x68PG7AE+3s~AwB{vpFW>(4Qp0h#2({xE8^s_@8D3hlHqyD zqf9S;uK%@8EG~oMdKc_w-eM1DVdEAYhH#^s`<=iOCTRDOHtr(^MvsBWFB$}Sx4otI z^zkdt&q6bg{Ag~LeWru3$8t5&phEDhgDu_mtD93nj+(jWQNSh+HS^z?9veb)IGCU= zFpa=&N9%?5g(%2*+9)bFJf%>i-#A7q2;`fN8)=FBREP+RJmK+V-nE>4zs(t_ZtOpt zjy82De(+H**K!_4h9Ed)^Dr;m%7Bl)V-H;O`WU{wboDa3UwUe<+kWkN^nFu#CflRJ zf`9Wk7lP!`U69HuEko14$=b%Xjq&a#=a*y$~`>U;Ns;a=#A*%V$i4Zx|Tf& zDX(2(|9qO?9+@tGbiX()ynCX(o4t1N{V_Mp(C*Xi`U#dj;}Gx$ZF>#^@`Q` z=@?xbEF>zA*Ra_~Y_@1PhG!|6WJ*`#Xr>rPZdP(LQ*f;kKH@I&_J-))&fl)vYzi`qnUtn7YL>Kd~9WmNKm}23`Zg5P#p0+^cH$w!O z^E#Z(?ObIe7wITA1k_jM7i&N6{sh#dZ{sZo>Oy=Mbm9gm4de(ZFyt)iUt=EY__NKA z>1^E>K$;*fPSaC(`*~wBRPyccWslezITA+(z2M6 ze+#A9fxTQp$2gc;oGn~D&p}D&A*z#e*6rrZnd^VcrgEf?navz;dh-uwJjEM1nl$#X z>=gbK0UmPJpWKQi?Dxfm# z>W}2dKQT45yOU|+_$TI;QD%iv>?26;eBL9{cfIGI>CD}qIb&fTksrA*OHorX>1`%i zZ=8*V9TnY7=gon#2glMIi91TTMI<9}T-WNRV@@KUWL$^iWE`EGYq>%Hxp4CR8#I@s z+`GwY!;_>_H-zT#0Mmxg#!44n0XN;MwzKJMd}rq3Mc8Vm;mL1pHBV#nS#mkoq^9_d zSth*p^sAESjUwk3Y1ez*n+D$5&zy5#(-Ynm#@iMIQ;Sbcsc-L(J)b=9nVib+m(_<+ zjqh12iSM=*)_W>rE`JjoILo%%+^r zb}FHh8F$nCFd7g0c*Otj4sh$1aKAE4rFVb&9#|cJj%s*vTO_M+#g{UV9LMzJ@Vk12 zQ_wVUlaz29LL5ifAJOss8{7dG1*~7F?Hw*~vXN&fNIE=)tz4Ub-j(!B!Jfs0ola>C zc6xgK8z56GT&U=KMjNh+C+jOME*TrdP24mE+`FYKI-1blVv@sR)W(6@9dn7Iyf z{l)TMm!Dlv=KcFW`wpChXL73YR92t=Wsv7FB{9sYY7anu56AOK-=h?Rm}% zL;M?O7X{_cJ;$#PH=egWjRC7`(>&dm(tKRfs?b9hC@XmA%Q*w1P0g2+uNRoi|6Vd5 zpBNo_Uae+BQ_W2LSMjw&*($fn>`XHLJi6}U$kOxPr6^mQ$K_vxO2KMMpR~yuy3v2j z=ys1T#T&ZKyV&wQio3#|#|R@|lg4qA)O9!ecv_gMhJv8|oA~oriwvD4izg|^=jy;V z^zk%zYyNr|4I7Ek@ZvskUC(j52>Wd#<<+{-QiDzNuVKvLHEBmj(S8@w8kV)z=dH;H zlcxCct|dt8rlSv{{H>|2Bw*Xs1g-4v82{b|=Zc(A*pkM;g_I@#Pq+hJ?0SQjrvA5X zbH&gh7t>%Dv7@R>=kfOz@D@-r5U?#zFWrZqDTrsgA|7J5|5lSb6It@RY>G-3%?o1K zEpRJ-9zD>rEHiOu>umJRnU-an+6_d4R%St9HCDFZR^!t=;b)6;XoMWJv`$u9(q~C| zeSJM1-MoU;Do|q$RjH(VCPJQFO$996d4KkHcXf3<84omlHsUWQGPyJhNu1rVH*%=w zb$o1JmUOc5-mU*h!pD#g*OJ~+%}+U(39k9im>gHR){pscrc%pK&2is`_s0j0QlU@o zLM+*qPnl-9N^5&xN>-}8rZNqSIVX|+V<)Z&m(K1nD;vt1stPU-T877^xw(dOevwG9 zrbRb7gDPXgwXxIlEvK+4HNEcUCl79zdXj+-^NzFAQ*MjBt8Lezp}X`Fm2yE^lSZr7 
z6+3EJO39_@gn!#Xo@QLkxPyAi&q~&(qkGxTL3tlqhR;-<*9Y&Py1KoYqLJ(@tyZmf zvo>@HU(JDyd+6!DTYO!dtv~8BJr6%Axm-hCpeTX@2ba!Z@p;=N65V|o8-rcZ(v6)z z7XG*8*u4;sGxhZJUTqTtoDOJZOk&z+t?Vp zw`+u1{Vzu0Bocn2K~xSA2ZKY2t>MLVG~C=+zfA}KI<5H8Wy|nWG?mcxzO8BAzr>x3 zxWV}2n+R!%oYEh7sX%qh#+4sz!e6o?0&b1Gnv;jQE2dJ0+`Ukzc0dJM^C=|{eM?KJ zcxDe9?Xw_21*Q!fSL#?;%wB~>vZ3xcbN(uQYiVsalPR1V^Rx|aRW10tRz{+of3^dM{1Az#eH31x5h1*yKCkD|;xLT!9g&i?=eYZNq#%Bs91wM*}U%c!OpiCoUq$W$sU^yvGG*4*T!V&(BF{ zsZLMFDW}T~UImi^#R~0o)Uu_hrSmI>a?=_ujk>))M|WIIh{kOr)!gx<;c;qxNX&nP5$}iB>`a4uuW?#SHOn_`nMoRCNd7cpE89r)pe^YTFT?GWXVb^ zc-9u$>STuS-**l#Hz}LQ?3VHVEG#H!E3ZF@$b4BLMEyScb&yi`k3jS{3(sb-`5q4o zw#Dr-d+2&PBoM>l82#UIMA3_|k--H81`n5OV#>-3Fz(s~lg9GNq%aV{TU&Z1GMU3I z)oYYg^qfR;iWwS)+PNYkBFJdC!Y&WU*b^qLb!?dM6B#^kg|lHp2KhZbIcH~9b6-mg zTi4|3&RQ0GP*>L^J2fm7Emd}Z{}vY%6#P9kHEdA--QtcC6by{hTeeqE3C_3Y;G~C%lonCUH7vO!+f2M`L7y{ zTI1DSV}58D*i0l$v~ReL?LFx*l3(r`Cc2Kk9|%LD7ReW;kg_BM$ybxl%Ebadt84Hp z3z}Am)Kg#(VTdRxp_4WCTgJ-%P~UO!P&dOiuAE(8m+z4vd+8JU&x8GhD2r19sCXn=|D;WrQ3}kEgC3$ z7^qV_Vo;wqYX%Ji)4#OzEpswtaJ$c@RZ&rMyuMJ3Fw@PSn-G#=eKK=?Xw;i*)jm(GXR)-v=!f`JV4lVC?fRt}xugX!w( z0{5MLdD5#_lTZ3gjwcKE^8-<$M&G4~G962zLc>6p*Vc|6H7cm9i%UpAK@;iU+{^`H z$S4`Ma%czN)YsSd@p;1J{-#_j@lw2Y2^AAFOtP5RTpYzim*@5Ia>S+;6$xqUW5S}D zxY8&gl+1Bk#p<- zw68?@HdHR?5TX#69PFeZ=+Pf5pE$-4SmC4nAGl?ehle*ODmXQd_d|Sro62Pp`_T+O zJ|Ah52!9s;WT~lrdDp}9F+_-NWvEVKkGd-+8W%iISzOlv4`_se_?|9ZrPG~JQ9*s6 z!&ThB-GRqr-#~dy*@BWDM@y-BH#3F%m@kla=Ser*~H|AyPoiA=cUNii$>Ki`-mo|5;o_!p4pmGSGj2xi2D+reLB@NI#q=vW>N*p`kSCK^G_4VwI9mh($Ar!akg_Q}V*|-8O3d+FQN@I9fR9-~| zEFT{q_5^}>g0Q3{EKu!+#43yX4fN+rbg5|Q4BvuafY>3I&I(#EAz^1v{bw;i0{kST z;g00#eEe?tKr1OJ`E(shfW=~l1bB1!#rc$xJuTgS10LQ7k`@Cllim8D&mT-Wn=Lsl z4b!S+xDvPG!j8|-|owHkLmvgFCvs+v|nX%?F8x=j}MKcage;bLn z?~@amYg+nA2E>1X^ir-+5fc;;pcs#W|DN|?H*kM{Z@r*8&!aZ7%hlpBkkaQ`hp5ir z{zgq29mFdiT7*I250AwZ#ArBd$8r08t<{=>h$zQ?cX+l`j-VuqIT#35 zHk-d;qJ>0c#_+|(#e4hv28}B#-JV~Kjg2YZS{5wOAp`rZ+nZOc%9N-BgaTk-5Xo6t zlLG?0rg$E;3JMCA8;s#N#T+r&ZE%G`;YENFfraHie{A%4!Gj48NiLHm4A`nzDoe&f zn>3lpruZ{%I*BSVJTg)UI29j1X8u@jd!v@|g_HFVv^bozoPDJ8`Ko&^OHQyjST zaFXMzI@#^`i|+T#NohV?uK4))C~Q_(K3_gI+l`6YEZnGJ(Z~+CWJ!^qc1Zd8`9UEe zJEx~>RhJ}UF-5wcK1TRcMR|E3KwzPvJ*FKV;2ZAN;>=pGMqx2SMh*?+3mtQ0%OVz+ zCMG5dnTb=fRYn75CT6CNUu_REVn?R|r2TX$7Nju#DaZ{dwHTGbw^Q6-qg4h&blLPO znC&m_9AOaPhV;H8VPQ$g$gpy|SWz%B1>_HgBqS$~wpg!$e22uu;_+Y+3V}D??D0NZ zYmL<(3}H0b3(NUn{{{`N9YiXjk#Q<0mxFvR6JtdK&x*g%x?1$W)MPq)Aui4*=gu|v zyjT#$g7*``OqqNz>kHe7UbdhzGapOrjrYrk1KB4-}x zV!8Mi%&sp!o~1wqE)UvHe-%5@ixevWrDM6p8Amx;`FF!ayJxc*3u({_wlw`OvH^T(qHZaN);r(P|Y`baWAMJ80_Uh`Ouq7N-`oW-Mnb-Jw8T zNk~o)7A?&0>dI${ zsrOm;2nYy-{`!@Ik1v<5*O~|@T>XJisOuEikwZHz6ZkwHNqKqvFOQdUYHFCs$jCtE zLn2~__3QB$&LUyphXe=$)dh#$W>5S98L*%K^UJT1k;Lq58E5DF>jzGtc@Tg99v2b< z38V&4G=WOHeR(M-VGqN|$O!Cx$iM(G6BCo2h=l@LFwhizT!${N9&NoMTK8M&tX3Kz zBobm;$+9vsu-UAZ{e}#P*w`>666#j0j0i?ew)*_xkN7|c2nfzxxG^Gw-@3e#>S5d6 z+X3*I$AK<5I1m;cNk&Txubv`bu4H6nG{erD)8qY$f`>O*Z#Yu7c4kxQYhXb9@#Q1B zhEneiw5cfQxIh(P!A3}+#rq~3Gt3M+l8eff&4G*9(AZc`Sxb0mh6uz} z^v6^~)S_6VMm0HrSDHn4bE+uDnW*o>O9dq?&|IMqar=yzis<>+3;SSXj2W z+;#_GvHw`GYE&<-o0xf_l*}K`n6W&WdH+3ELJMA9*&j@b}1&zH_zQ!o=AXpN~LUNy(1z9)v3!u{t*8=Ei}Vh6Vtaq%15+L_#4*I5^`= zy51tj#>JiQ|A3bfu~1S0ef|7S9T9mud;-Vn*oU@wf#`J`3J%gBIEJpcj*j7$(4dgE zUb$4hF41byVg*L`F(VXI)Rh)%oCrlCV2Aw*#bOQEmY6O6h=YQHF86qMt`T$=6a@SM zgPJ*YGHc2hra)*2nzR8RP=P`@B^8zaLWOcrXlP?$tK@$>}Ml z!ya@oSurtfKEGW^Ue=MzUnRZHjn0Q{VS#f=OAm#(uD1zP>w{z1{PjD2>tfGSHe%0s z7f?jP=9(M;z?fw`sj*oHO_s-LXvR(s8i_l0WAX9%#`btc6zH)nwffAt2nv~i`4oEd zbcQVVXPFDshmXdA`qQ$?hOD(aQVb0Z*~A~9^wP7_cdpAGY)hBOW{#b~nUF}JKn5~s zwi?~^9tpv&sIS1|utkN1L6b@p_1pLF9;2mTLIHq3h 
z>iSxLENQgr)g2G$FZ{dHip2`@Dk{Szy6o)~f7*RsZNAV-01U`}?>BNnVq*JKLgYWV z@7T!!_)Kqumpp}X4%e6VC|uSAATu0+vmzoRN2hoP;)o=ILV|&g2m=S__!`E(=-Xu) z5in#R0MHTl8zZ*2hA(DaE0hcj6nwpYadCP;H-wH2<1ds?bmvHfL8Z`RflSEG4qmZ> zw9|AZlNhjKYqUOXG1&}1{cGB~HaaVa5HV1bJ&Vd1q?MtOI-87q-U9uKr35fKQWNG)4Oe^Oju!w3rt z>oKK^hsovDwA6?RF~fof&=;4tg?t7-SW-1;zL*5M7G%22ty!p6J}aN2Sum{62aUA+ zaK4$)ODSg;Zi5hW!h#bstj7El6Re8-6+JSt4MTE5Zq(Z=OsIh=7u+`)m#-QE3c zqY2;HT3UB~uAcqTWYQl~|mMjIpbIn+=iHV8p*6`jQ)@*U5xy5^Z z-l5^(NN8x_05Su_&Pv<0>e%=gK#zyT4u!0&Xf>)+z@QN0fF|b4zdQM{9W`j2z+(SEsQJ<9@j*;8QgU+0(DnYDblt){#6Mu(9n4m4G}vD1{IYR65Mg3LV^FVbRc)CnxX*w)zHuY z;Cu+|!%n3n)wuNJ@Z4NJe^HWRq=|B#T=kMup3Jwm;$RR-!likVW6|3Pb%RD)0f;$iILb-DlXfU}d=_xHs`RaUm{BKx&Ye|sX`Es(7+#KE2S6~nn{k~UcY>EVZr=KC%wf8;I2o20$aDHno%My&N zo|LUzumBAVxVmKqQWym2HQN{~*43inb8SDP?L|RCjCiPfhj+P8@Y24r{cLW5)GXHq z)GNQw@UBHK(@c7!d9>;ld-|p!gMTQg6n%G)&-+Wg!4Ry=a$8AkkrBWn5$c961+sHHIBpqFS5jmD(23 z*rkKVG5+gK-3^kW_H6Wbg({UeOQ>~ga+s})!}ZNQZ8ig1BX)R-l?w%wuxy#t>a;dX zpW)*J^ujItb}y(X*kd1o$GOHv#@$XoMpwoT@}5`!$}AnJXquYrNe8tW!=?BsRA$TFNSF1es(2gr9 zDJ3}Wnf{}N4XMp%>ZkTOz;u@c%-&Z}!bS*PcOX=I9{^wEAq`>W*Rd&N{i|hbgE8OP z%p@ap+Kk~+YtF4nf3oAmC0910*wNY+v*Pq46MxC3u{8tn^A<0aw0z}nYRc~7Do4xW z!QxHxo3Gg_pWHF1|JGeeaI8z>o=HZ1`K$PR)1pd_lY0U8j#($~Et=%e+LceW z2)_AcTXfHIgkVXz{~-D8H4BXTjm~Zdy-&@kEFPDROcg)j z5laH8JiwDP7AiBJ@gCU_@qhdVK&7YkD=;eAVku-Luy$4A`kGm~K&V+b4&f#i+|s&e zA-As)VJTPtRrAH39eF)MRh35FqPAVf>A{FT%Av3ifavR?c5uKD5McSm=~;}m(!bfL||g>NDz_m1EklgdN5*fDVjQ%p-K zWak_gb(^Lm0jJJGn_`HY5$%8Hl6EGAZP7C__j==)tJdLXDFK~4k8z7;;}PrDaEopY zTI@2XR=;~y=vG}yg(_^#@-E!G)c>mzgZ8IK*W`ccoc}@rdtMT-wuX@LYS~I&u=WD~ z)m<*Hl5Ulbd;8c?&fGfqDG`Kq09P1r-As zsxywIurkQfGuxt)aUe7|hUgO+ffcl|ll zS?>SrrQK<(Ur1y-i#i0C;XTEWT`V}48C-?CUy7G02@E6xz;ZII`h|KeszgHEFHr7( zhXmQhBs;4(MBYqXi}R+Fa~&ixE}qBd&m&ETqC46`q(F%k@bTdVdLTGBI1&LttV-oQ zCdLd|P$!pF9?*|dQwx_)fqH*F*YWSzVTzc`us>vR=y4Rb$7S4A#mp0>OU4khWnw9( zw?#FRomee_RGJ($5`)g*AFb-;ML|<@JPet{?QT{K1_8nOFmO1I5oFW%lSf`&UOI~d z87T5Cf;16x(q5fayEQB#@)_z?+V|}n+_;Dv9;b9A(&+=i-*mzTx9yg;$JQEjRM*$Z zKK?aEmLj7qrY))th&9LUV{cE!X3e?P!}ExH#b!O3@yDDuV-2a(I%UlSQVa&2iJSd# z_mAg830YZkKnM!O_XuO?eM{i`e4Yl@si>k7B3nwr#gzesRSK>8z*c`CaOSYy_n+$O z67KGt{r&ym5D;JVTGyy3Df_nV^jNUJe8-Odv$UkBq!gr5Nk&Z#>vwWu0r36g`OV%drfKpa8*70iBuo|S*9IkB{FF`r;9AVeam_?vs0bXq|9&ZEUKb_Z`%cV0Lzy}A@ zxv`^02IDDov}$!A037%B=D9tX6slYRPQ{p;pZ{m+fHvH^;Su@r;?bl*_xb4iIZ`HH z=yuutsMczO2aq5Uc}!*#cvT&DvfIPy(y}t>(7}Fy*tefGjJxe8tHv3pudZsmy}ze( zyV3*L4Y-pZK#U40VGnaW>038_xCy-PkO5vQlt3B~5b$)W`xybO?s3DDfqm19IaSL6 z1~Ig-`{Lic$|`4B2#~2>nd$fdzXWH?e(Tbf-G}dAtr;4EOB@Q2rW?Mp5v+@8os7u0g*eCB!Iz_TGIa;wkvR*pwDW!+YZQi3Z=f9<^GwFV&Fb9&+EEqqZtbuHF78&ZI}R> z&gl5KAt2tqJ?+MJJ*-;da@c_Yay-Bhp`oD(NlC$hE!EJ#ZfX-b^#@{?PiQdy}<}(t;O2#R~YiI;bDNH3}$gUYK_OZTvh(>+b@Mj zzlDJ>d}vfQ7O#cT|5VqgC&SMV0tN}Mw?CGg<+|a1e(qcO?P^esHbnm+K9o91o|8dwTGJ$lM+b zRqu46Yj1A{urok0)q!*Z)*zkE6rAh(kuDU9c>VsknV6CS#0zs-ZS5gY)%5xUAOQUz z8?c(b>q@Za`2dIa<%mSbeIFgziZm`~YPQVDce!npHD=R)6iL7Hwbn-KHMSH6-I&4c zEnqVbdOzP85=gfX4?|^3|53|A34GFYeczR#t^e)Um$WN*_kBlZyqDdyU9`)K6Ee2n z(#_kIOy-@GacU>$0OqYo-+edNj~zX1Sc)bBh*l}_Bw`K#Yks&`kBp7|`nc{&kVqj% z4!i-ZLS9Mf*F-uq3_Lt3FK-U8YDEPFK_@2`$AgLdj*blNP8VcU)WJ*NPtTXTV%%kdZO|=*VoFm-s)xW~=I# zNn^d&^Gy_B={Z-Tu+zMZ zx0hwdrjw$!%Zjn9*Hu51W0(5VkTFDyMm4|GZ%TdRQuue_*dx` zIjSEs-VMLviilL>QA-Fue}10tRBrod#g0M-dt56gXgb-gdktK~UDjG84SlFHOcDIt zs%P}?67f%sn2c#SJC&1KytKyo_Vq=B!QX3#dWB^!{4*d7<_>GTyH0h-eES_ef=?^| zJE2>oz8&RW;Oe#I zk4){VTRv|?PuQu8dTrd)qG`@jzOmmyHFV|K4(B}~CaX>=(naytl1!c>Yq@_%V&9T; zHeSIzt&&k1nO4I(n zuYV0NHTo8+9Io&6V|FHQO%iad-`;9dYilQzC(!;Nmoc$H#ob*J1#0DNy?^~&Z?@Y`p_Ue;;BMz zZ16_e?!n6yGZJv1I<-qHVCgKvpSXT8Q87-OH)GH?hI>z0C`; 
zB2uJKl*`vDXohJ~5^&JLGT@yaVHr>}>OE^na?Rn1()>LFtix zmdMu(4Ud3|(EF!X_6mwNrXL-oQy*#w1>(EZcDA~IDcpI{iyZWEiqN$R+J$HUs3-L>No^$ zRq#^KZH8tify9P)B>R$tSZ6f4Cloj!5gUI=xI9B;OU7VYvF_YqEYD4eB!Tgb&CH|& z@RJktUr2ufFn0tth*4AED8)$7=X^P;lfIIk;e`w_CQKzmPBNhn&gyXcGo2&D;lU<)MqG&TG&qh!L|^=qhcv646SEM zW)_tuMi1H-E0az3^;BqbAEkHa#83)#vq!uSeqyad84~Eb*`8@LDZjn|~x(__5IHN-{Sk5wpjEKdgk3 z`F@WUsLK~<5P;PaI=_N|z=q_5!HL-E&{d1mLmQwHCK1Ktr69KVbp*S>A|}E1Pq<-6 zsMIzTR~xpF^Naq>ueZQU zWb3yB%l>E}kd7(*hcquKN|z0Dr*e+l)?Z;+s?xc-vq?)l!0D^;i6$kz2v#Y1XX{e0 z76r_y(>Zic;F?Trso}RW|Finis4HPOPrOux5bECc*d%pxOK1>3Bw|s|h_Ya*vKYCM z7^E(){N}g3hFUGMf_j;hTafr+9%9M{Ml)=SXDla{**ZQ>=*cW2GfPjadhicN-rp($ zy+l1u5mVqgGGafK1r~=gB?7WYTq_B5`;H|<@dVN@G832#XE`9)3-0I8Y3PPnubI|G zPX}F}0+;L|bn&I~)Pv?w*<&P%cWQJo()5t{JeSD1zd`V!6*Ys8BW*a7AO8GkgED-? zKR_ZP#?oS_P#VSmrdtKi{{>R(W4Hc|%%fD?7QS^%4YaGs)+9JJpXT)fzOT74A;A(B zOt9aNLGF?y3R_-r*5VqOY8QxlCiyBqBC{i?K{aIV(cJujARFpHoHm2XMQ73b7@g7> zdAu70G|j}1JQNWoXL746R`CVJFdVM}9ZVanq0B6t0)ro1>85|hafb-lzNL(Ki$c<4 zA@PX>Lf>p|s6M>)XZxCKWK$eWPq9w65_eiGh3l6JoV2o}+910+=4Pszc^vq!f1+ku ziR!c_Hba05Y==}2=y0%_ZJCXLo}rES;!27 z?A^c27`|NDy*VDNYg2OZQZH1o?}z_Dbj;vpn!T2W`u&R6d&%Q%@bc8&#pHJGp|xs* z+4g=P2K#1>$(_K(&$czB4q z!a6w^^uBmr${hVDshVeff0)@EnkczC?_kV%4 zIR!Su)c-$yrK^=K7#LM-kfMft-&fd|eGG2@kFY!2%}F=&sor-FT85NS+fw?{+TFDy zVUz0<3|<3y?6KwAhL`%Y@t~1P7Q3+ZMsYJHuV!)u`j7cYrmge8}cYPKVv>Onw^{sn$ZDtQ^Mugv|j~}&b zjwjcH!Z~-c@Rz;BwIW$Ym?sXo$=OwlEj6-d7&(BASN+V zFsmU)-BtsDxXpsqZ+2#y9ipFXH{&)eraCS(_JP#btQpTw;jK?YpP{+Q4qFLU zi0-#r38UM*{wS0;sgsL7Ty=$ie1+V$or`Hvl^g`}05EYOekIqXlMEM6rGcm5EQiLz zdu=h2n>ADf>f#WpJBzj?i^r1rwPVo9ZP!Ic;cpsxhLeb;+Zv<0_8m{8@c4 z*}>&c@^}Kf!qfVcOBu~8@AObP&FlF|O6c2t##@t0^ufDM+k58@xwQtrmCAT#%flXX|0UAUCGV!7LVNbaT;#QA1@pK)20iQ>Qm{SQa?@C&(jNq?I8T)T}@f#dLJC8K_UZ`cF5!t6D8pm3{ z_rt%QQbqITHq;qR&ISD|&~BZVnyV-Ex%r!M*oARbAI|s*%2tiQM+F@fp~5Pe>DA4s zN%U>9|B877IpfmrG=kXb+{vYx#F0WL*s)0NSO#?&x96xT=JsYu1)XP4P zM(wz;Dz}~tb$QX<%jzBVvtip7%VehSsZ?sz(urj262JDphSa&7S6EgnGcyUaGzql! 
zFneoXCs`%wcbDjYBGql9w)b*dP31xOuk#)RHRh>o+&CnzK`G96+&WoJ$$Jyer_&(* zIS}e`)$Z*2P}H=i*J>i;zgEBa!7EV7l%%QyRaO;EP~dhf8y&h>8B(2zQuD^uNVl$- z+sHX6@qfMKsUSaA$Yy zTLl%vANUdzUu-xazQHYwH%mBDME`e&E|v_$irp_AWc*lBLz zZO0{urvsFC1yPL;R=>|8Vs3{Dvg;7qxZqT5-fChQ85y?~0RMH^lG#l$=1KpwsF2sX%<#BmQ^JlmYt%#Vgrx!)o)%6T3!`W zI#w#n_S(Y_l&4DM7?cwsWh}LB( zTpf25#%;5w`Ak?sIzi>UIW?ZBl~yym-QC?&c_IT5?M5`ZGy%Hfd6YmVE3fTDP_v+ZOjjcbOjU ztMaY)olc70I^{KM@xKFess4{1+Q~+~`T6+; ziV9JGzrD5a(P_IY`SIh&`X#MCH=K$NcBq@=_07`!z50~arGkeUXSg0A%a)= zJ#rTnA?s1`>h)lTmp|bzSe(K@=)i8*@J;U9t=Z2UIxbkU^k)$H@!2xZ=zl}S!AQVD zCCpUQP-)7VGata6$|jSnXtLF`y8IsICp(4M$`oa3B53|ds-H128xs>_7%4KhcV&Il)~`6*lMRJOx^r6%DExGJQfZCk&K)is^YBsg6#w;EiJ9MtgLU- z)|H{$AD{zOv>SX z`n1??#?LPGyx(M0y5q5k@sSAIc( z-H}<*^rW1uo*t@5fM4~6trDo9vJxqM>_?9-G4WTF@I@&;J}{Pa#7O1vS^XZvZ#nbF zaPaWGmMjJ=>2PpxBve!(Nl93Ecz9s2NJt_{M)*iD&;fo=Pww^Vh&efW#ibI0f{@B( zAXs%jzRA<#(y&0oNwbEU7g-1Pfhf<57RnJz55K$RFqH!l@Z)?MfjO0R)qRJOfcCZW z>a*z5^4NgbNqka65Do!e|DR$xX8eoj*LAT{F){p4sJ)FM=#r(!+$>yQzxu{Z#z011 zRnO70u{F;k%N|4XZSaxxYzL@^s@`AmlR=s;S{2$wnl!-zrRFLRO1ucM_4f~0OpEU; zO{7_@{}NGO??{V9larGpmBka8@JC4r^qGMn+DI}vHYsUPoH9emz0-4Mah^k>PM<0Mgf@RT%N`~+b&s4@&yQ5o5eZRJlL~u2MX%F)l5r6p{ekgNhZI(L_N!FQ~3w@heSsgRW?TP zzH7=V*nbOvMkKXyAv~V15{)A!C*QF}j^!g}ON%Av4yM03GVQ@8)SjUr;Ft>`)c7O+ zBPy`GD!hGZWF}P_c}d-P8R;m8AeNleiQrQTnUgwmreiS_USwlHWr=CMHfhrDY{VHy zwy$Y&kv{wrP!Rl8B;sh8n7f|Wx2)wFZAj^>gVmdTZ2Kz?IA3kSLg7M94H3=E%<{=8 z$t;-r2M0*RVKgD;-XAH;d^DuxPDHj)Cm9i@<989))Zp{nQIt}t>gAxXrRX5e491|lt!-Tp>6#hA2;3|PK@}(8g*DCMM=T~E)1$x zr9By54H~c{@tGi$2$O_`gA)OPMva-`$blJozB}#HZ}@7yLnR;}@UVTqUZY+C^s19I zB0D=fP9!+aM|X2`Bv>VrK&VI&K{q$t_t*0(p&_C8)Y#wkhcnfSR+cN(@*#i)m{e>I z<^;93Lr*A!Wa&SQJK9)7YTcz;O^_Z}eY=~@=*Dh>kW~R6?;}GZF8UoPeAyl&31Jx~ zA(ID}o;*81SBUk;xv>QV!c+|ZeSKv3LESlBzWmga5D92l{)W)*S_pof&$P4=@v=Or zizX9$58Kfu7aKjQ70SaSBa0cy!6IcO^z>294{XrLgb`Dwd>~|6EGKv=@wnp3%EJ8o zz=+Y0w8jWQLBf^wzkZ43=H~tW^($LP9j~;MR5EU)K^z2nx}Vru*p3=7{VJ!S5nful z#*hzacEJC2tiqjBRnk6KmjubE>u^u4Nn7j4Y|-r~dk`F(@~e0RV_@RiX<9no9AL#C z#{2*+pY(sa$p?CEC5?jMWqERPly&!eN=JxRa9@;6kzf_8E53VtNos4_k}|^Ym700Z zNnx`o$|EEKTwHbMv=XgpU-L_{> zs*3YlT$~LbskW}}%a<=Cq~FjS!c$*fJON~)qoafR02BK&A!04tWMq%daHMZqB=Sn4e8gVMxc?is9sn(o+@j}cDqE$;xy+a1Qii-wBIFK3H_7YPjq7u?5R zSCR_+eC&=@M2`PcCCN*^oWBTEHXM&_#>Q9nucu!Xo0`Ej3bX(jlpUQ{N)6ylhj#R= zd5?KRLu*FW{4qbMX2}w!yacAArnHpMen=!~SNRYSTvMi#fV?<$o;OC2u_*T_R~L_) zJiY}e=yancP~$I6!vWQ>*lMu$)$uGeM(U8h7=61J8^u+J*j19=w)|s zJBzWpCct1i)WHWqUE#b&Q>ts{#C-`(fF@^MBd^=2EB^=Sd;Y3*QE6$|j~~#E9IHLd zpRmFkwolDJSI@H-Ys-mlw;I<^!LS%s&o0y?Ey-2%qS2t>!?C@BGVC;#!$iwwm#C{K z0S2^MF)M1A)30gP_88ee)cm^$0j~Djj)MWW&aTTh*>s(Q3AZEf(>f%_$ME%cp5&f2 zbMEN%Jl`h^OHZs>;^$ZFze)=vdCtd_mg?82eoY^P8GgRrCh;>@sliZZ#8&6yR$a4b z{{Wm<1+`b680BTaR%d0me>H4rs+ICyRMyEAy&bJ{=%w%n#cfXsl8MY%_R{2ls$cxV% z9mO9eYeK9TJ|<5j{fi7B{LYI@MdxK?!>>7Oha0`3l5glm87_uMm)E>o2|`ua>hn6A zG4ah)F=NaAy9BH|MC!Ym;c7*7r@HZ`94j~_!Q-SeyicH|2Yf4c>~TpsFHfx%T}|9^ zVaJ6I`oG`=&zuH#|BQaaGmrc7==VT2)7gg%v=*(~SgVPeCK`l}b`d&TDx{JD{lb!h zf^ccyDrJlfBbW)Qw5VFG)10*-L)67H)Nzu8yJ4Y>TLsdlcP?GlvaHtZ5O;d^ z-NmbI)B4{ip*lA|GCd5_yvN?1Ew9fZD(&%ks>)hJuia!)N|qP3J!%zl29@g=4CeUH zaj&L>l*7Wp;-Wau*!%+mf?_cL4VyHMeCp4iev>S(y)%&w?|LUXu!$vUt9WQ`jEZh? 
z7udzSTr5}hjoCZyE>B}j;aCje9_yz2o#ubTe=(B7_#iC55>)Q*!-2Te^vgYWB)qFV zsP;(gr~7aKve63X;oAx}k(w;O`@iT3YBhjWMOH`Ha=G>5ictHiQDxA8z@9VF$%&Zj z`#3h{dZn;3$ZHwpU+l!U!AdX?e(Pidc7MOjb1tzAI3cX@J1hq`3r|sqx#cr$fnWb> z!;uLlGU)WMDk`voatQ%!(#ZWyU8%}(9%+Bp0@`B7Mrz?-8;YITMlx= z0g6MNLj1QB9E02|NnPPr%X8Zn5etOTWXqVhC(?so`{Vcj zCN2sJ$-KPr(qI|Ag5a=?5L=)VR?P<7=5;{hf$l(cwDG&$dli~^Zb3oNSUM|{*);h$ z*R|A*JGY*m9tIYce%+#Ki~Z-Wmn#H7G%wbfFgYH`b-vzd0N}m2xF{Ca4^q(*R#(Ss zX=y3fY>5k}Zw>i--_IvZ!^6Wkyxv^-mc+}-J z{|z__!&B?eY@?LNZT;A`tXL&dWC`~v*>Qy+0b<&GkL58LjX2uu>}>n{+q0(U<$zQM zM-Iv=(Ww7nmV6ciP~si>(1$5*r4jkOpaMnP zABw(MV*nGDE2pR!uxLpEsDu7~A;62_;^GDd2mdWALbn~%xpK8W7$Q&KDbswP_cwA% z%DkDwK;i!~yB^d&{_XUtr1?8XEf=-Rm)c(vJW5+-;>~#vHJwzBSq2e@jvUGr>H{=Y z?Y(cW&t|5k*^P}!AtA7X(L}PYu58KL?qBw^e0avj#sY(az_X`JuFqQXvc$k!g#IL)X zPYEPkHO~b6!jZ*tq5I}=23$;9diQU+cigXS15d%Kd580k`?%y}QAVdy_>cu`1ldz;>_Q_%d4x0$rO>1kbnpD@JUJ{09p`OJd%-)mi@qg zX#AccMN!#y)_M+f^|Kp3Y@5-&J=+T5GGW+q-nASzmpeyxgDvQi-cakX^Wm3!9@&hI zM{5l{D=u&b=>;8pM2GlyN2OqRJa$G<@R-@zC*m04%eXXhSFmhNGnmroxY)|ZKh zU}ZCIi+(a3>HL=~@=dEouDP z4KKb`4vx2O61m&aXRv|XIrixM0WBCuZm1>_moNCOq`nJQikM9SW!>Oi9)wvoOFbwJ z9KV!3bEqEl={7#Y~)yBBs7_n;2yE4X>TfJxruutyHVGOw)lPGZF4L;9T0Ov@_$dS_gp!Hf?XrH8NbpRwj-uWtZh#xdGljaO6->Pzd~=+(CLt zQc?m42is%Ws)324x=ydjlYS-<5tYN-mzQScWi|L7<>=^GTtY0bwD#xKmBKVkM1d-2 zn0FetQnFQkV`XI`oL1EgDhdj*_X?CKK^!^Yxci@Oj{$fhrlAREG#J$VisQbE)kZE3 z0CsR|D+ho>n`jM>kB@-gH8wV`T0crA7k{Y&wiv((#)Ao@$F(B0H;#ACM~{;&_+7@Y zGBrURyVrAKvx3@Hr;ZB*%uNfS5huYiM1C^pLiN)j`b;%0J|vP!RUD=ywE=%~UiQs} zF97^{Sf(sm?g5Z!e7e-|t9GCWJno;v*!?Vf--M6{ff|Qy#D5L?uMN1McWeKd%E}qTco17&9~TSJw;O zxVSig_j&;wTUZb?GWr|ESX(2J4+(08z$gX3EIOAhD!JPUVTBHIM59dDbBcrKIE&G`sV1NjtU}eSjCE#?fHvEkSh{d9x_VD83;(s;b#)huFy*&^O zF->sA9~_thDaqsY1aB&f6&_Z2kZ?GbJRp6V?Y2|C8Ns``xg{hfPOYp&(2n4Igr2L= z5H&TWVlo~jrlUKxMXuO@B?DmD-yeeK^+qv@`%aZYDy6$xuMc4C_KRKwLlYBpAp28P zLhDnp@^nZFF8xkdQ-!I60$h-JBNF@R*L{bpKHNLW~2S$Xhp zp#$=?`SorY;I90lqU(NiS)f;aA+ZKhnM@Rba{yAMo4{pdv-W4DR$Edqk#|a9AQUwX z&Ea|%@BO+DZ#35xEI?R7qM~&k_ttUo@jx<73am~jI^|V2B=W^DO;z(7FOW^AFqt4p zOH02T>%4CPy7LxDmO!Ar@r;pDP#F+g0|FCZ#&O`2zax?Z%`zMpGq`o^0$Td&%Koo* z0cO;e4Ijv1?3S&X7%dm7k(Not6@vFMv4CyN4Vcd6fKr|Wv-B!airhv!>?Osm)6*&% zH?|$;%n@s|v#Re_+MF|*hmbHx_}mtA<=|&c4-M*w%g!ePWr$?N#6Oa-V3f-Q92^jg z9ne`kAd7yo1mq`p2g0!Zil|knX2|LfWZTj=KqkP5{E@-wa&wu%U{!N+n)pFtMUo%AC z`JivzcFF>HWIxDC@R4K&Q9y@X4q#soezN(h-w%6rbp=SxogG6!AU|}yKl3Vqj23H+ zfE+)A$os)>MwT}ND5aRwBVIz=-tr_9EGlxdMO|5*oS*$Vr2ngDK1P&WtRdw^ZOz0QAkw z&3nxoa_5hMeE=YIQh5!_)va5cG-U>82Ivs1@DCX>!L%a)T9AwP8rQW5<=1sKqGA}T z?Nv0b>94jraXlU6{q-H?8ck8NWhxF&hedg_=8UOQsyx#z2l` z$_$(cut!s-%%;qE7{YcfzV&pJ0M6iv#R)k$03k0bfKL)liBX-nhBjWC3SZBnv8qLSgrIXn;iKI4A5LPQ>)`46jH4&8udjW3i)9%|j9fR+|P z(1+(5b__WX;4V$g&9BT0VvJIOYD))9K%L{E0v4TmU}@=!=k*vcAq0S=281&(8C!oB z6j1cQ=w)~w4Kv;LaQ}AP)<8J>{fdJc;I(FoWQ(e+V*p698=-58Nle^Dt57VIB4K7u z0PJQYP>MC~x8{JhP33T+28viK^pfHMA(sli&E{ z98#2*@8LXe4-m=A0Rnk|g8|j=)vzQ^2usj>^WdJ+VM$$fEU^U1KND|uW2ZoRnXCq1sX#nFE*A;wDBtM z_Jdl|Ul3C)l5tWC2dYZ@?qsn~MaOFZU^k#s0DJj&cCTOW*Z=wrATI53S@#mPvtv}z zwEJ-0`AlD?Tmg_HlU^^Bb?YI;UwAGpD+2-qWMEQFmK&`smm3iQL$V#g^zgj?zD6A; z2;3*Irwu?@EU*U%oOlATzkm+>Yifa|V+Eq&LYa)=f@JN#0zv?wTtFfHnVq$Jy`6U$ z{!&zJGC{cJx{YYvvWq1}f&Mo;85kHK|NPl*Mw(S#M`t2g$2&Ba3Sg15wsW4%dj=F( zj^6o<77q6+QQBHjaV~01ko?qPr0FeQ1h`0q2qa!)L=TGZb(6Fm8`8rJ2L|vD5^nWZ z%}q@=hKPK2MwupdfI;p9K5!Q>LR$b^7y(3_kevJ%87@{`)?3PT)GLwz1S5HAXj zQvlekkxZs903wDG#Uj8s3j!ODf1Mbx1bH~*(ZH4ka4&8f zelV`Lvnqd+jZ`)}($Z1|W#zfy^;s2wnI7(zZ61MTblZ+({Tq24&sF@LCx8KMfva8v z-r0Z|@2>^`wuuJyYyM6iDj}haw>J;KMVndgFO(`;PM8ST^oIBUkE-tg=dy4A*3eSY z(n5+#S*2w*6p|tpGAlb@FM$MHP> 
z=YOZ4>vvt>@Av$ipK)G;%^?5XD-O+s;SNCzkwX!nN{dz1jYS!qmR3`3pc+KC1X4pc zXpfEt<3u`a!<>G8_)(X1gHC?nQNOr^EHCrCe?cUQf#xB~>eG< z)!DV~>R@I!v`WIjM*_HUKFDF2KDsu!W81wCdiJb~=DTr)4mQ-s$$6)W-yg|-858%f)D|q|7N^luCr+HmXuGu$2li$O z3HEqpxtZyIi}AbM*J{1RT|iqO5EcgDBe2%>I+V_ch;pk6iQ?ZQ9RO1KCj0LN1Qejq zRy>it_Zz1RPQELuQ4Dl^;CWrettMjaK$FEhoE|41?0s=|Q$lMZ4){+I5)Y}`Y&6Tb z{^`@nH1lurPEO*r8dl2^bAB$VIQW_wui?l;S#^OTfjQK#W`x65;!dYb>x+XDK)5F~#Z8M>BW*{o88izCc-KSlHz^ zHfAl5{X+;v7UK9CUgcMo9nvT;p5ERaAv(U7dcQm+;09E-czf7k9CNeGW4f6h)(o(| z_Wp-+LL?mb;|$yPcVYtNQHCn+RNGtA()e=%+oIAna6ehtbyv;v~Y3+(h-ft4kj!wfOc5uHSfo zJc5ETC;-6f_n^^ZUE`s9)m=Qw$?{v$&cv`<$9HlXY`N%X_Qu-DD;$FDXA2(l^t@J+ zJX)x@n)%k~9NDz!>fS|5mmCx2**|aIyyq`o98r{*X_Y#5%mO88xXG#_$*6iKmemeE zzVG0P+h147ceFSTb>e^H{6z~RedUVKoH=s@`ftg{bkLsy2z3I0E#!nr01GNuW|Q-M z5F01tL%;DP*^%EtMqzII8aj z1(LwjnFDE!>vmmau+0vqzf$0|Y~3zbtX-ds4B_cti7rXF9+1($YxAayaN#dvW7~m` zxOVRRI3#t77KvSp{dTXYj5On^iM*MzkAb|iR6P?@bWRQ%j_%%=N9AZ$Zg9&k-E=VQ z>(_JFrbc91uKhI?$Y`A_FFtY%-(d+GOWTJJAC3onPmU7p=jG=&-TEb7nHMCqaMiO@ zI$fVaaR&=KuU*``!PC#rpee%=M=QW!$;Xe{l~GbebjI!^(*bBc!8gC%K7KqwC-ym| zBE!8xPQNXLgoNlTk`TIJ0g01WuV%1-zxmfk*pz3bNag*A)EOFHz!DOuyw%|dCwy^7cJH%?#JWFy`V{^F|G+@- zduvdVg&3k`{JKyR4Z6$2dww~_O+{x|3`~ zK+zz*u^w!AQc}{=4SR3@8SMrZI|4Ha=y7f!IwGsFHQ2-Di_xOo<@Us;O}X~Z0jD%} zvE46qB_-RzCRqS1wEA7%f^Q+I&fD^xM|d|K|EMoiXZzXq)b1}bmm@rJNx~> zfvA@zYoF?gy@=;O#`OZ;8AVk^c_}Foy{Aua-kQJLa!Oe8=+Rtec@W~Ns;UXXA?2I= zTwNJZ!+UEJ9~s-d0%5Mwvsrk!kbTvvs*ej#1?g$)=zRF}$$IfuFwkf;H0<$TC7(WV z^Yh178%bV&Tl<``wZ8bPXVuTX!O{k028RZXw>u;y{uDZh<)w>;+t?dN$JDko44oXk znf{!S!QI^*-8A@$Nact8;54#Xv$t;Digw_*t}b`Sd!OjYNLSq0X{b5qRy=RtzK8|I zrL2 z-K&fo4aJ15UByud<3OO<-TK?wZ!HQNTUr`xcA@Y0Lid}MhTL1WNb=dTuyqa; zoa99#$ivIawQpZ_hnvvbX!C@66KTcOIOySn!9ric#AM(1A|_<-<&{{pxyE*T3>LkN zjO4@?VD>0nv-oWK0Iadj?=BdPOzove%PqzXr@+T z3_vdSrUwWh{fdZz2M501Xw(!Fd=@^U))-fDHiNg@U?Usb4>gl6aSy8_&#e@n|6QmWUp3D4s0d4e+!W6u2PT70=Z*|T#qLBqBtHCh9u6s}Ix^|rgQ6-5 zuZdz8y&j{P=^!_jF(mVOkh|s{Ykh*i_|J^^(3k;scIJgRe>FD>rBD8_U0D5V+5CkI zSXN(rx90YEq+$H?7tt?@PM*+MDjF|6b$XG5JHBO7v*FsFk5Vz*vY_$oZDinOb>ng$ z?@GKeWV8BdY~zFWqt5IRiGrfHZH*>81}85iiLF*Pt2?P^zsBHX_{rlJ8Dgtk;)_1@ zEmN=4JpE~8l}vP%5wl3Tl?SJtIDc4gYQ9NTL}5Ma^~iL8?!|G{C+zJrSsp>md7ICp zuyAdDT4&_JVOxV7-u5HEc9)q?M%{HUTsOYivD-{*y2rS^LuZ|s3Fj}LvO0EV6WfJM zD)F5Y%hW^c3%G^uG5;zK|-6t}$j<*$?q)@tE4PRkrSHXk>6+UR=AXZ>qTUFD1FBLXSom z1nMHTKqhn^P2QO14!w`bO*`-3{cG$a-H{deLu2!RnbMVngSLqkxresh^Mv5%nd>+a z0O-|{>!^wrX1VwAkYK#pW`^F>7%(`Q$`|Hl0FA%Bng)ty)u;J7RNwytH zm7KEYzSbzJDV9x!X>v=;tlY5rw)N@nqF`Jghh3?Vm|$UN2hX?6uGwa-%hbq1T#HF_ zRv;`nIKHSgs)&$yS6ocS*)n}DFom#YE?qDW0{UZA8xUl-nAyFmjh;qD%?+2Aadwsf zRR7VC>Wv2lBm|xCN_G|MPBPJ52;4LVW&DnNmK}1VR&i(gy zBIfB+@VZwZ_%kvxCWNFX^PN3+4o>qYPoAK=Dn+-C^0JgeJOs-b6u=D#vpG3o_wU~? 
z)V|KTNPP1f4=lIVjRT*8oo%=Lk}0t*me+i9KiKJ0ttogO8)q&TUGt|cUnQMWJY@?P zT2AinkoEC9(((FW?2=!n%2wZ3sx(P+tH~e#Xu8yoUH+~>iP9shbk69?+ggdR9MNG5 zHCNKiOaOO^^lpfvL=mIR=ach*R>$;29xVvfT!(!m=|jH#-rYT#p7+lONRu!u=;zST zPzVoTEZ}h0A8N^Mg7@Igrb8`yQp=Vsk&Lb!E?j;HVkH`@Hh&q8&D`8Kx5@?Wa+!95 z{0l^EXL@83wPGd2OE^LBWx&GXKk@3%IZwE`PI z%g9&<#uMF7!_NKT|K%axD2Tl;IGA1XcGG6wKYz3otg~bmoKaHR|D&nw&6g!002rm7 zYKRz%%I;Qt-giCTY|h?*ljjT4b3Rn?&N`;1m^JYc2^#EA++h0eqf=rX75Y z4;CB9>&(s|^^jec<}$JJwc<-lC)SR_G7#|aVJWz0*LmqvpmsAMf&cD$XnqwY2GBuC zI>lGU>kURBW$!X;5`L!esN8D}?{73vnc&l3ynOln*+VmJbl>7*pN|qN39bXiq73|A z)tsxlVJ~R9Qpyh$qvbRr1#O0y5j^lF=ZR+Ybc-JyJM!r90BaV>)NeZ<^vG|VcTS~g zQNtIVp!mTn{KLBO1I$c=o4b1#^FE5=G5MCVNh!$F+vnM{$NoRqv+Fa{?<&sQLN;y$7tC z-@(a6Xz*=pY~lnXxn;3`ST<}Rc3U2Y0rq7(6tySN{F9S~&~crD!lN7)2vi0;KM<$x z$NM`_kRf}lqnm@jf`tmY@MmZ6g)>kSL1cIS?ARB25*psu7iaM|R%a_Lo?Hr*C@N}8 z(h65By^KrgjadA2ys->eJgHQ1*3I{J7#ci+(-o3~wFcaQ=D5Rpk z%Gcn_?fm$kVxRP%n@1|wMD5=%xo(~0(w742r}DC{9cR0JK*Y3RueP?fM)ok>4`NgP z4&^-}O9lKktED#Y-yZ~ha~J>${4=QbrAkUlPQ&>NHLNS8!mhS^=8U}GC}3&0YaR5O z@130^xC<8FoG1K2b%cpnsimF`(z6|CC?c5!?pj%ZKTAtX(vScTu`@&W8MAH=w_XXl zl|(Q=$>`dCO|j*WWGqR@`SC69(VlW)C;-RNavZ(2w2f#S#nr^73GLaVnr+LE&bS~v z(y#RmXN7a6mU(z%mpgn$t$y)uVp;^!v8xZMa7Y|#wI5FQ{o9+#Ev4Xh}(9|Tj<>Y^x&2fu6AUAQ^4W+ILU z+?0=5^-TdgxAkpUoh`5Fre3e7vBgoWuLoouC6$cs#SBF8eegJH>u56cnN7UuLaFQC zi*H>k4RCTZ#8iJ?Kb*rW=GdF6b|#?Ra}kqX_4{~T?st}dpLPogM}M2K{dI*or1H4# z3}3(k=7MKu&nv$QJHEp$PjAZS`(0fSxsww^3VGN5XVF{1n~k`V-`bZ?EG=l)xPdvD`aKe6-xYf7*s;5iZo$PQ^TE?Z zrT)tk*@PlJknwleL{<{tGCiK2hr)IQY#izp8vXHi6XzK4f6)asmUUR~pfdNT&0|yD zt4s5s>xf_(JU;4?#g}S&V8nvMG(O1mXtKTIw;kvAZCD0`rj1QRE9=w41AdSoQm%BO zmpvg2`d8B?<5BI|?69jwiK9;JZ5F~B*YjrGoo+KlA;H`)s?F1I>G^JqnK+w1cDeRg zGk7&VPO~34Ir{5uVe|nf*4#4-6Pv3VRwcLGG*YjOeVlsQ=Fn?)HqpbxkSU?%6)P0^ z4VNgyaU6Ovht91ze=o&5Ro#xoXs?jX)N!GJRSo=H2tJgpaY`9gmoF1`AG>WGyb-Mj z$J&sL8BQ_&)Q1PmyHJ<0ns5Id%&xFe&ok9B&`{|P%4zxeS|ahU-?hfNSA#0LgCp9_ z{T300XCI8_ctv499Gj}+*aoezwYAl@_&myWjk-CVEyJi-F!L0FtSBIX5AF9!w49I$ z&?16bg2E3~hMB>;2rcqFJWl9*(7o$K1NoV{!6kZt*zSda7d z<+ho{cg9~od~7vn`M%Ul&|KZ=19G>b9^A5EUN}py#4S1s9BiDBsow@CLrbUY@zi-~ zH~d@iOMS0AxUfZ8Ha+}K_`S&ze=a((9CBNf`~+8}3)(jr`^Ai< zU5+Sk+i~jU(jj$-G)cy_0?6(Zf*`3FZY!+p|K;?%%>yUXa-c*w)s{<)u^jq0wu)Ez zWhCwmV1BtsHydfHVD7F$Tgbo)m~#~v1Hv85h6!&bjc|h7!_SC=4~3F&9#{#~5wMVz zU%t4Zr4|wj09)7M(9fS{QeSXZL4uL0$Om-qHwxP>w&d93L88rG#KhF`*n?vW=upVc z_ff@IWdm;e`gUQbJp*}5Bu%DOcYwctK+KkMn*>GUtOF-1rEnfnB{viOQwMMrV<999g&NWf;~*el0gHj zI0GVBG&8PTG(Xp?uhZWA%VRmWzk22ugr3?fwMp!I5%W%J&y~Xq9$$YWcs^ijD1+(Y zZm}1!L~@r3WZwb&yHvK?dcBk)5#^BnI^mIMjq(;~zrK(I)|&Cb=2CbZO2~!)_UZ`Y zW~78*d%$O5Rkp9z0{#VHeHCgy%0D)U3i?}Pw{|xb?=5# zan0kdN1HZ=y-qzN`pquSs%>yd+Xsmb(0Ug^{f9Y^*K63;>%m8H9d-m(k3!4i5)plN zE++j;+TYBJbj%V1SqeIT1izjdfn>eXtJ8nFHNB67HJTp+wLmJ>=g+5sH$;doCYl>p z>xTpe`Z&kIOb-<6pI_tzM@Z`eH5l{g+Q*INIr%8?bjn{gGkdQqGy;X}l~uRAiX$?f zSaRaOTW>6EcT`caLituaf1Zc6fARd>xB2!_5{3HaZE;MJe$xCsUR!I=@(5OJS5$Iy zbGvE&to3=ZeRjG8JorYbSBlx~KBK9EUFA3SBEfi~a;2oCoW?3;{!@iPxN+{>=>?7Q z3RdA+7+k0Rj1uaIQ%UT)xk~gypy3HOC0uT##A9J>JN4il^efnDXem44j#1k|+W?wrWNrTrII1r0Vu9}pPfF8! 
zhnP2CDcZCM-h_;~vH?9{{^8%*gsUYC2p2i*2O?nbZ{NA2`>U^7!@e^R2X%_mpDSoC zgh|AIZMXso>wEnaOXxFJBzK}&x|DRjq_k8AiWNMD$+iP(=!6WyZCIfpn(x~W6A;Bt z8!pF?28k}23}Hay%Qo&GhTNu;QcoOo2;54b7vDjKd-zZebs<>Oc0F;IFJImv0-s4R zheb7rM9&ZSRoJ^44!uv$AsY=bqSdvr2to&>*60^6JP9@9`?&$A-Z(2S;*PbS*>npJ z9BLqJU9-JcXs9eeKe3kR(sgxphjDPicUGbotoa^H^T+Xl#>@_1F}f(!noo}oJzBvf zP3{!(R3lCNyYbsJahuppR?fvtOiYWJnY&OF;Suo7LvZe#vNA{#+l(1TMz`pQ8&>47Dd>S2YHvXiNJo!Yq^ofDD1Oqn^4RULRkj_ta5L7*|I8M9(EF7i z{A&V77p}JxT`(;qv`OF_OQ4EjWic@GwLXL@Lv zdP1RDTUQ5y_Ue=b^;^T?>-xqXLQO$@BcrNO35+Eq@Ft_jGYk^PBoMd-@+ z)ViB~6x-`>`R^8=#0DAy>884rb z-3>h-5A6z_Vqf}$CP=cYSFaw1iOd3a1r`J#&v0Iguc@*xo!nwjatBE!DqyhmYMth(ZA^7?7H{Ih|P3OHK8`y0$6^ebiM zhzy>Bnq$14o0|9U8kbr;-QXS_dgeS4%Cr%(W zyAH8ZAUpWdDGlH-W*Ee;4n1o%&|(wxVB+&o?lNB1AWi^JXk=`xTo_L8V3IeT@q_>= zYm06~@sRHTY}zo4BL3mwWiTsLAPj;1@$}_O^uxGENQwsWsk#A^G`zkn4m%H>ZZQ!K zUTEII29%+tPdd*B0pva|&KH{mK<9jx^$tATckkZ|q1A>`0pU@7HgQVEI#M)gl~Vxb z-R%_#SL^cg^Ov#--%T~HrK+=noxKc%RSC4G4mIIEYjuDqoY^U;HDqx>c_wRKTU*=P zIr%UXP(mKT&h5K*SN;n9AKjRooD8r|p~X32kA46C-2!eos~S~^4!1%08k{n!UmA}JwOhs=6D?|d@x&sH45;UHMIAD;sm{fzVx<_ zkB#czFtIimZY>L!9k6)6-hhRL0Umo@W^q(iD5CAw*ebX=70O4H%7DibhTChlOuiG1a zjywXDp(a7I6h)MHICSJ5O%lm!@pPhuSrjZLUm7fEWcf0C^GBZuUPvV`UM!XkAmoB0 zn@nohM~gr&0c(>Dxvjt*M~OwXk_re%IIppG;jZ%Z@*?^FXmlmZ+O;w`?08jJKz%^y z!?kInE5%96Dxh&3axFe2VW=3F^as`-LVIg9tyiG;ByTJd#@w+)J5BHaCz2$| zY~tkPbj9j2c)pV6VLdG z(HmHA88G~)yW5;u?T!a!1q_jHWUjBj4ab~JaAB^WKFFZ#hKr4??{G@IgJt+urJ?S>N#t}tqKeDd+V010$4_Ny_ldGG2MJz~S5we6y_jSGH!(4uo|m`Y2GN0*rxgr) zJFK?c;2WWl)FfV*EjBo2ckuG+fU(8?pn`!D{5@!ZlK>c4AO>g%aDKu~0n!2Xj2p;T z-T(&o#|cA70kx0>aw{~D@l-_V)CW0qPN@^Cy&KdJOtM(F3XwcZU_rjaE*2CP#-<{! zrKF_E^j2Qt*EtZwz$Q2(LSa1&g*y=G0`g;lVFP)a=EcxDc44~v(98$ax}uqX-)1nn zgvfXDn-TA^ckgP|Aoyp=8;xp-#H?`t%M{wJF4KRuLzCUbK<5_ z_;_TwS7SsIOMWA>)`l$0qw|F?{#oIjJ6}fZ4c~ZQzp%4*bJ448_w?+`=0|VB^0aqD z*=8$`*jTU*@D$_OJjMYQuZ&Rz;-H(*1)}?#$hfHl6btzaT((YnBQ`v4C-Mn$_tWAP zVyZHmSi*EJLiXn8$8qcqLM$pO>Mr+()ZOm&CfoM!XN|7xG0H~Uk(vo18CC@R)j0WX z{oOcfQH`-XP$dOQUy7BpsH18Fh@p|Vg(*7Kh|+Gm_aZQI?*GU z*b^5oa$*NWm&VjI@9g#d{8QHMbV zX170Y=SyxDsbZQ#Kj^-P`~IwGZE?8d!O3ljk5@IP*mhs;W%M?2bnNFCyueWVr^pZE zSBBRBu^vCZh}FB(;(!zhv=qd}I4)ler-qdBNjZbkzk2=yOm5Y={2UtvoCu623Ljvg z2~tOl#d~)gYGR%$y5MaBr-oMVhw$92Eq5A&R`p>XqV*EX5nRjsH;>O zv%X0d3}s&BTqMdzpMaq}U#&iGaXhB^?=P3GV~UQvwnH z6w`}|vTH@!{~Ju`W)?F~<>kR0Zy_K5mt)0X1MjiV+KC)%-b!5dAMn5B?Y@zeE&LV< z->+Tv1~!^1%_|#<)^2VLy!AJrOTqrXKb(7HxdrzICg&B&2aA9Hye^=A+1>BI@F1dE zRle^rEiNr{hTt!IMUbR zYnD`E8#Kt~iZ}fH_XnITul>Lv8T6REWyni=K48|_9LR6PIQPhQG(-1;g4!*FneEOx zFoINHx|CZ$K>l|H@@YpVKCilWSw+$=RlP>D#5R5SM_)vab5RaIj4en$em`Bg$my^Wu@^w%lX8nx!N=<9b+(z^ zqII-}ZST^n8YT)uL&JUd#`|SAH&!!m`j=OL+rufla)VFT^R>wV+)%rg+wH+uY<=F= zI`n>pp7@Us{6^VAEVGZztwkndH7$#YFXcH6)ypOU}aQ~zqqnAk1a5P$N2kHg~z z?VD`Y-zOE+bhF)drOw_nl(1!4M*ai4HM{-%Obm2fZ59kUKJjeyoO>j_Z7c@pN;Uv~ zAs5e?-84Qx^_T?R(xKy~5fifBCgw8y!`*8Ob+p>lu=G=^KT#HnLK2%h6Ltt%3 z8QcOv2Q9`Us$*ahHkXp#P;I2Sf+eg*-5|@6BVM_2r=N7R2si%gvdy)LE9H9?dYgT? zW$BFid~`X&SMAB=4t*Yuj(VyGpA3}m#p)B@U}(@X5NmuS=HXYx1sC{|JqqXdmEGjW zBn&1@zX9J#Hgbq>_DK6dWqbybiyWAgoB#|X4vV~Va9^PrmV&4^)Z!x@y&3)n7?8JY z*-{5~300ArTF`S4DPF0?<9c?30R_p4dq6E16e(*_c9_RZT5{F{3nLf}#H)=g0T4F2 z^<5i$PKF656~BCn!6GU3Fy$#JQRW;>;o+F7-PfkB!omwax2@0v3JebqPbA6+x}LLO z8Uf;99|dW0H#F2fe?d8I@alj3SCBOi{oIFIZ z(UY?3)3zVBDL#Zs;U4!qLE^!)O-_0aN#?wgvSkD2ADR8#1;B#MR{|Cy#PGmXEYHAC+k?+S@&=9ND|`$!M94h^ zaco=|v(=WOI?&nSv7@v}l$3V{R3Q8B{xTtAb^?faeFvo64giW0H=9~=b7QzV^KXLO z`PguhiuViuRpMwn5{y=WnK{OHwBF6~Nz+;@=4U%`sJbOoA&xg&k!$ou&M5@~EM2x z`k8Z9$xbNmLIPv6#45YjQl>ID+ASH}jd5Pu;DRyY0DmWAn*bJJfjiTOvNBlth>5wh zYu#}Lh3!xb(YfQE5S0NY3x-MhD~e^)CIu*g;J@H*@Q24A5CGHv9+pX? 
z^VWHP2LYqit`K;W0KKu~;R`*SSi5TDMkecTGZYhs;Rnj=dtdhc6Dmr9Ait)kMit2T z4+O5+I0e_ohnHHp58%!MQ?L%}3aS*m_oL_-xRB75puD*67KiqaIVr33JUVqoRb`Y|0#k8l78}TPP2P#ETMhyd$(x{ z1k$Hy+E0Kfrn}+)5VIF>XVB;wz=VUgjrhWdtL=*+i(}LE^%-e7|5AcM{yA(hII0c6 z4SM|LLeJzD@l3&J&x;$igT1Ez)^IPc&`U2)tI#{#FU6$>~fO!$$*#srK2qe?sQr+ zA?DsJ?#k;&!4+H66=bZoW*BkZ*`6t-lr%SYI#Soxu?QGb{U$_1D<~(hmv{ z@u>&ya8eC(>xNgbTg#WkUS)l)$(k(DtM+-#&C5qQdlm>ko!b_y~Po<)bY1eB*z}(QPosN2*3;qN?zOx%ur@za_syRQR6%U!dn|E?JWr zr+gr+LS@a)PYM6SKF%HqhAA8=l9MTQaR*L4)(ZEc^N-WeJ7g_T)!Rc(8HLTZ+5=w9 zKaym#wnwur+Qe`@GT8E#$7CE}*sSv?s<3%AfrqHuUS1$n>pIMQ7WT@scMp|cJlH)D z*Oo56?CtgK1v1K0>*324rh?IEk%(`({yn zx!Jx(V9~|}`8E=351pTT-HuyUGNy`sP=CzGW!Zwl3(^?`MeW*T71+b%{ubP9u(g_> z$hPCZkMzoTOIt0$%0l4R-p7;u8!V1jU+bFqbr2@Dyj|Z}7 z&#%eCW;S#V_dD+Vz?p>mfN>aNYwMiHKQ?Xxyf0Fp-tsjTw`eD3Xw15l#@fuh4s-ry z!frsNJJ@Mje7?s|pdYuv&+p~;ZbymD((d#H=qU})aT(IKJIki3(CKipX);C1G~0RT zsdQXH;z`coO|k(FpZJSa6A%5elTmj_R%mXnDYTJq^V<3zzpr6Ls)R$Jvu7kn`^?Hk zQ{)0g7oS)9A-QRACJZVi+IJ~MM9RQ%q^bv7)$Ydp;UewcGmlVW;27*3Pr4#-2jgh7 zo8@(MbP@$Voly!zO=4q~3GTB@%gwEL_sup{^NIfKfVbTJ`&;m{7;S{{9^WPKQPoDj zUL4ZINCZrPd(}9=(b)dZEZy$UD5ik8*>?SHsc|bGwc(kZN-Bvx_`P?Kd2GynzFRZ} zDM6*ol8dnz)uONNt>3s;O7T1xe1zNt=JZ9toJofmPTr#s(#& z*2J2b9skR86a?-oD3xIT3n1TVZmBd&Ct=^n*m(IL6Re;ABD@Kuc55(c5rcKWAJGAf zxLtj!ad54J-C$GiWZug6>+@GgszfvY{wp;h z@1=i^jwaQcfB-DsdIdgJd?)nx00g*mG~@~7DfB6f{Bnwuc+=n4hZYEU=P`2UmoFcQ zuEc^y1b%pU_~^CaiGfBL3yW7Wn=yyM17j%o_&%jph0fMlOirXAvc+MR>J(NGTpNxbRLDH{6R2GaZp=Zh zJ&FfTv?v}PFE`~l%zS~@7sOgZ356vBEB>6<&^QUlI&e;cb#}8a#&87M_~um`Nhz)# zbI6suFIs_t+hA4pDcfc@Z*2nCjy%je{S+{$%pUVF?2*<{dSM&-418hSB~+~13~=bd zsVy)Cr#FH*g=TP0$tJZ~)x*&ld^Oyp`%N2;!ElHF2TKEt<85#6XyBrhdb$>b^OMeN zblVEEK?H{AtfAAro6+sQC-TIl(Hi$L3-?JmK9wRRgm_#Ev zU>_uD1yBP7<|5z@nC@_*Y(ba1`RFZx{!On8E4RQ>3yq}7y6O~7iXjgq?hK6tfn@z2 zgmg_`am&4ZrRXrB%l?F8766rc7^v4p2+1P&nrt_ug&_sV+Ft@bqG;*e$fFcYH7)1jtH(x7uGkd%sChqAcbv2<50n9WA|Y8 z>yMSjP=8>q#MuGMGg(B??E>=B>=-l+O-&lG*mxJVal*L-+Pmt@7fD;&v{KWH7)}Ka zH9c80`2PJ20euYrYRz(mhED(sj3!wSu3dY>dxm;qoDOTzGD3AA^Bo#nij~1g<-YK*vw$hHFdNQ6o9z{YYg@W+O3s_ z^I>-?{KXy{AFl_$1CGYX+2kV067-D_Ccs>3EExC2%WMA`%oI7iOxnoVFx`TExmx|{oUKHppOsu0mmzafzfD@ z(3gkfZrBJz7|CK zD3p8RHpl>nC^z};$-GZ_51PrSmg+@Kj>LGKva-U3gOAYAam#LjD+7H9AFH<dar<(k033AtlRMjspvWX zwWY8e@&O>cBM}UOwgrJU6cc=~;&I=y11nfBvN5S66OzL+Da9Q9*OZMP9V+nB+$eF$hpjKcgpf5$S zYM+eSSIO7n!W|LBMnl=ezeTNGV<~^#usT6G{6gI#+@>T}~wEx^Aub^OnOF+Rq zi+LYZZU8+3xPf9&ceL?2pj$AA?JWec`dr5Y@Hb!)E>{hLeKoef#t!5QwhSasB1}65 z$GDW00--`7gMk?X7*T*|ung)o=tu^%^`PyJ;t6lNM+2haFz4apdjc8=UkcU(aFIm6 z!8-@f4Xrms>Oqx5x`&GemICecMv#DyU%$Q&*j+nU5C#V9D2#JLBYgVwX;&R!PYV5>I7Swh!w@)$C&hCD2c+`%#*G^U z`%u1q*<*45W-ESzoUQpNxJr-g?MTzYoI;hWCqv$&$&lTBf5tfe|@= zmErRFlDSLfH{Z;^co?&*-+27b97-N`n$3~}ol)R|+AJ~JO(AIQq+M7YV5pd`MvNW& z^L-V;TsZ$|-V4%F42G$nKY3z;+#iDz0XOSSY%_UONT=Jgev=X&h8dvKCz;xvnR!zCBqK}$bVDnj=vO>_dHQ}t^K~e zdlA3+e!^}X%yj0ILb1=rVyp%z7cVd5MW7ht2&B0vIB_ss9wkpicPh;E%6R1;35zqD zny1h?;IN3stZdvGJWaLcpSF2=Lt|TQuBn7o{5R+F)WiLcD-Ge}$Dp^lN=js!1Wd+A zpi8J;Cr%V-C*8%gCTNLp3IIX1*c=2*gs~$bAwkI*Pl^Fn3+R*5s88fFS-22>zMhpI z%8NY8dzP7UErGiLZ2_GwyLbEJ(5!W@!BCZBJGFNIeWH+ z6`WAKuY9N-bsmUh8s(5Y9w?!bXU?p|q?4^HYmqwwO+@U{vu9`2)%(@r2VZ-M^j{HX zqdM{G)h;|7GX3IYryW$=kE4a0cnRZlngUPJ!~`0a)m8DR0>0wgG(V6JhRDVNkCU(P zi*afRA#g{7W$_~l0B?8&KnE%ND6XA2*Jz3YeHO_Z5Egc0K^&YKJ{pTLHHf^xON{D3 z-AKSa5;xGtg1SNLTCOZR^ z8TeGJ3JsKM6ZX^bH($Om)8=^9ibkL=JkR`OmIK5 zdGlrf`Lil2-9HBgP**6egIpr64u}N>j+AViB6x*BW@f$3#uj_UcC8CoxzN`^pq<76 z2$L^-uF=n)E%1@1X@8K=Rf7-?;|Bh~@c~mjp4rj3k&&<8Q1s!NP{49Uy-x5Bq}1%a zdq)vo1A88SEa$HsD~%?=K4i$Sy<(+xEW_oIgr@l7`5rg8Mxstsr7drn>>7{wzO^hE 
zNuIHZ&kSGQUY{rP2+5auVZ8oruBpf3ei@`|LMksbOaE&V*WhWjTMnl_W?mqExeEZM zmcN}$t5A7h8~%XyL;A#tB{Z!CR|J8F?hmRv0YglB0chW)_i;6>t?R_E?t*kgge)3X zvL3+%Ljf5S210yTnMjJQmvGpNvmJf|(3Ip&#gK`u*jSyV!6_L2LxDvY$RzQUV5-B( zjd#c3#H1-}3GSjexs@P!CcDgBqa}eTF&VjNY8PfVQ+$^uHo$)r28ck4OTbn+WFpD; z0ZkxNa@&j8k2w19dE}l(NHod(+d&|mJNXwc!aqe;NJ1O zQ8dxsk&hTZE^tVx+WdeY9)zY~{5NA5Bn4L0HjLy!oy5G}!(g+4v(e@N zaAQ>_+@238Mn{qz#?ip#A!lzdLQ^%^MepM}03HG3q4uJ0Kpzfu2vr&rcj%oMP`$Fd zEV_#nvAdzSAvT3@GyW>PmMVDP)D1uxfD$|c0ToZMNeh;)!_Z2=tFK$1hu8FCm*At& ze!I|^AXs2B!x`v!5MOcbpgOO_(Td~^?9v#VMhFAWP35>XsNkRx zFzOCu8#r%l9kZIg3F43!wVj<)wF!%=KmLmVpOseW8?U; zdb)c_L*;dk?&hjuX07a)zgu+m8rHtt0M_0k?Z(lCC6r-PGSB^0vwm>XZJ0QI&BO3n zA5isXOlIG(7d6{FFn^#_t#j~R-)k>SuU2`^t+4iPGGBJV^7Ts>s9fdh9{7_Mw7oks z{L<|PwK&nnwG)$J!kWvP%=^{&SZn#O#N<8QenBJVDauVo!MQ(Pe9!u$#4LwBs2ni?BgCaQaD+IvS=E^^oqQ<42aS5&wBviAqw zc1(R&&A6EFW+%dV#?!l9YkkoV$HV)>ZflAHcH0Kuol3!U`A0?VM?a{xWwsafI zqO27$oE8GFM!pZ>brz-C-={83C$$a@JLf(En|zMu*hgyd{whuy+oPBqrU_1(*|I+fk5+KDLp++^jjic z&jBkx;&o|1ZQa(D zz5C(s*$t0rt>*ko>(?xDDyYpKfAd*q#zG=|r}j7p*8cZ<`r$ywi8ZdlW z_|!to`f?Ba2zGm?@4n_5F(OTnp>}Qt5DnLtpPM18h-0%2`bUUH=1R@v;tK9z2)~ZM zMA);_-I+z7VBhpL411gk8Ywb;xT7F5*YvVul}t_O%qw^$&i-CP<{sY-H}}Ex!Zybm zDst_X{!c@-tf?&}H)k5<7IWPh3c1CY>N1@kh9bao+u#wH2{- zB9aE58W@^`-QA1!aEw>;8~HsyLVgjKyWG96OqSqQO*>duX;di%SAe=s)A8`E@#*2u zF(2pMf9!vdt90QHilW8ge&pGjH>6`k^)u=DLff*pr3^;HN*V~VRQO>LVqZ^1Cq3(#-9xMlH8 z5sU^8u@${gLKamwG=Uf+B0|$~F!YoQlijQT(6D(_V%YHXC&zkeo-2pB6zH^j7?LpT z=Q4$W_0f}<`UjT-}spa2nH zDHua=rivIbDjmSB@}SwwE;k`At^-6Lf|C-*1HN94OK3d}7>hbfvjB+lfRZ?87{!)y z6qKk1WC5AC5G4o2W!0KBQusm}_nVOa)l}~k6=r_p8mA7aRGAiN-(ec%1bNyoyg&*3w>yk!0~_+U(~ z*SmaAZ#`^|($Sd5w+HA|`Q`cRcuxSLjx;EhjGIJ_dL<|=G z10VW&V0xHXhwEc`hwnu35I2BO;^M`YPt6fOgC94Xpagg|{jvv$J~7fP>g)4@z5uma z(nJwf)~=oDa0&qXmJx#w;04OP4Y?k{*?h&fUyxf7wIxajEjT>GNxC0a`W~9Tsv-d# z`JacF``Am!RI@I!k$=nkSyP)Ue;@tg`!0_0daDJ#EN&?o853S3%t5^9Pq0(NxOj3| z6UG2FbsXpk=(l5Pw1T1`l%)jz*^MX-m=S7R(DeWszixQLRy0%sNr3$8midfpE%2WV zfu$6hRryp8NCtWm(aQO6jZ%NY5B-qZnaf|EDF3QP5FU60rJMmhAeqj=!(`kv0eOXL zEns&8fCaABq2y2VXcis(1C%#__vC4z1Uy}l29~v2Mi0jdA1}<-J32pRYZ`ohj>!Vo zTLocVHB4<#p7zHZpTKRx!o_lO-Vo?r3s);A;Z1(AY@y zNwl5Edd4;quj~Y08x4&`D`$;3ZtW|?T-J5qx>;iLULv!1ftQd4-yJ& zR&9bHY?FXba9q$#8FkD%g#E_}`3hiWus^jQ1(hL6#Ky9SNE`={Lnv-}Y%Jz(BzW4sxev+ znsLdAmdyoY9-$HuRRy=LehOxt%V7_a^&Yfc%&_K=eiMHTRuzpu*T6Rff`_dK1K(** zC)=45DGHcM_YA}g22Uz#YL;QxAV{eWqzUMS zbd4A8+3_#!+RBQZ(3-85fOLLtpsCl?eHJp^_3@wj$g= z&VbOup&5qDL~rmD)|z>{jBEHPkU*bA7BHZ9Kv{AtVR#OW^TXRd#AxmWvOCB*pyx?U zf5JEo8etvY$$|454-{C`UoR9Xw>fw%1wwi>jRue&Xd4pP;Lb*5H<6zro)J5M&cxaG z#vu2F*b$9tMK?^-wZUlZ1n=KUPIb7#WfWIai-Y{}?3i{~!f1l!Vh}NS$we5rcTe^Y z&{sDpM5PqlcEOTH;r^%l?a70(+;7n1{Xo*dV^b?bhVVvXc1{fD?T^Ln3~!r`ep%If zOyg+2qF=qq;8?6BJUw{jtJG18i;r1c^Dn+d-we?JFSG&hpVBfmHik9^x(-v+Vb6oo zR-~POaaYF6tn#9xZsUMf|7nhxEjJb-qof3)STvkjH3(7`X=(1t0o-`Dcq0>D6agQ& zs;-g}>>`X9q_1|fwyy5>2i+|~LMx&66}G+APQt83%}%t~U>LyrL0z!TX2n^Hf=uMXvJ^w zbmEEMk4wv|F`JLMRn|~cDn2&0B>9J0^WfO^K0B;C$` z@O4mDu&J^Aa+tc;l*>g|vIT74vL&k8SfP@qsW2U0?C5)3BU@6pca+!`DLqzrkccqm zdIgofmV_3mb>iftm^3sn8!}?6=de08j3m7IM{D?1S0=KOBZGanA3ma`;L)FF;W-|> zz9t@HwuOgFZ)PV!XhTq6M$auN~ zKaG-GgcvRc9-KW})n?`kLxMNTOwIa#+^P20-l^;UE4s&6`$%ytM5QSJ?g$wn(D8M3 zb%o7KX+m4!T707Z@Ir~WIX5X@9%i10Jhd1J1L(vu$ z%___>-5W#}=ivjsFZZ~3biOuSVuP2qeYO3%*>4_C!f~+gj>74R;8WI&7On7wuDaj! 
zv;GJJgDZpVu_Nd9pHWq{&v$7nv}usoL)l!i0lbe^0e60=)oMJawC2#4xp^aF&6d~J{!40)jdeJFP2t<63)6l% zn`HxvXEx!TtCrj?|9S?Nt(e-;@6TmDw!-VEPfR+G@LmC!HZh4W;)hNN;AJuV+;{*%A@E@F+6i9+HjW^JI zMDM!(pwm@zmnU^LF`Ov9B_h<6>tpjCz+3Qaf~mJ#_5%1OD-Ed}LAmX)CV*!l-4Rqfz1afV>AZ`eXK5w`co519!Fk8r-_0v5 z{(4kmu#YuW)8^E$mjypInURDhvh4_Ng2qUx}6efBmU|ByxsDMiWf4R2p^$84$ z#EYAu8e`r=2*_JRZDhPv>_{&oa{!#TZc#im^#Pt?ylko%L-R9#)u@9vTEKk1k!Ckj zL>(#&1++tx;9o&sp>h%o2ir`b z9xe}zyX~J_X1I4HW-Wkd{eu!mxdQ~;fNib%YAawnPyYKeQLp#RaI8`Pnr3h*(CK9V zPUkBuC^pv>qopm8cm*7X*<`9?(@ZOnb1U9nrHvV<&@3qjJBnXTAyiCIm)6qaz@uZM zJUNu6aCy6uC077Jz+mFnZ$a^fsE1~$3^&CSgXaKio`JT2#oY>F*H`d6gKx&fV{*xp z90Vmg1RNdB7{ho0(3ofZGB?oZ2e_m0Qn)2Fas!)+-kn0UKCUeUiZ}54EN;M?I5ZwV z!azBip$v(d3wb;A0?DxVgJN^YIMkBIHn{d!xi=?jFt;qZxU1yFeDDdZf?A(w_6yWY z4c2UNj1FR3vN7wI1)q)pF{;pg+KfJKHTM%qB#kc7lAE|18(AUU8OC)}K zX2NBr7|(A#nt0IR?T~KuFwFu-qZc{c(O|%f8mA>2fYu%^Zjv@}&CopNx6(MXxN0H1 z9GeUY73kQIn2)ghq^oy%3MVdyIGpMSw* zE*#DpYtQ3m&V_M)4}K!TW!fGz8_p-<^k1G&a>H%la0U4W?G&TaS4)ly9^dHwW5p%$ zA5{DP==}^)`HfeEHXprskrT``1d?n2eyLEa2aXxN5Uk*yG<*Tkj1k<)R~RFL_687y zLLrpv7T67D8iF>(Mscb_gbu+NZIn+6+Q0>ZdH(Va4x)m&V=HMo&0E(w^hO6MMYti_ zkb`)PXV28#FZ0O}LlqO!aUH&`D z!2;&rrJ|#d;%TlFo-CY79)CNg?FK?oE zYwm%SXBOYodLp-C?3kiU-}NhLDV%nF^@E@1iS3MutW!*y5ZE&J#H#sEy{uQNISffC z)c^ZGY@K&p&inuW&0{MEA(D|TD<#<@PDUi{VN{|>QX(TnM&ejSRAi*C22xUX2$9iJ zNs?8OnIiPNKXX36-#@?4?e-m?bE@mQ-q&k9pO5wYwc)`JjpKVZH8@;Z79Z>A8N%J4 zn_KW;-J20wT8?h$FeOUpG>5y`U2vyM(4vapi30V-#5X`Xaz#fxdq7_#p>v)u+I~1Y zyPb*(>XxW=3j+cI_5=l4KJa|v@G2p9vCZ054L6!TQthfpkGxsZaogBMZ^YzTcRi|phxpHL;_up0Z`xY?^veT8%qWW|I ztg8C)LjpHBtSrvjHz9;g?E52W-sv@mkcJ}bS&9jc-MjqIaM$N`P_F7hY@p2n1cFn4 zosjQR%*qtwJ|DMcAJ~iNIq>3>_P{Ai9(MMEWxdmeP=yHQ#vHm+aa|V0=Xot!6mKa< zW< zX!fOql!-ocb0(kgOFfX1Gd6Z0jYT&OI2@-jA?hxZ3AzYJ{`cYqr{+9Pb~HXdRBLXO z6jW=Isc|k~CnKG&uHn!E7pPiFD|K{bY?iwey-XHONlEb`nbf_a$8Tc>vVW|sjLjJt z`)DD}qkLx@xr z?P7hUkuudJXKhiEU!!PVh-xpRL6=~_QEHsohwh~pE=bbv#er%-so!< zGq}B1B>&p?O%vHxVxswy?603bT}w;zqsXMVpmCn{tHk>Lj+mX@Ib~@Bi5o>~I(*T+ zd&mh{a3mLuTr|G{j?{bij`~)jvc%Acf7ya zgU!V8a(9G;>||eHkRtX6a$w!MbwG`*Sjs$*)IX9%LTt`CwI5)xht+m1C&%p9Licb~ z5pX5-F-jeKJ&@Lw_rga&4m5Mbg3e>sNqtWF{PD||`i50$Cr_L}vPia|X=tO3TA^sg z&=?{`S^Di*#Gi@34&PZO)hb=;uekcDdVZtBF8MJxb8mM`{*|P1Vz)=-nMECAhJ7pB zshX|YwXEq<#lK5>&(H5>ZSrNAK})UA+HX@nyK#)#`&MTM6q>gyzkgEI|Ky|G zB#ad%mND+u)ycn|AM~FS-S+H8tt}@v^&dJa&Mf6e!63sF zz5LULI{M4w9$qN4E6=v9tiB$9rQbT|wb!ivK3i|x@^kvPIdPYNC;9H#>rKk$gXi*>?ott+G{a+_U}9WL58o$M}Tq`|Xma zXNV072+VuZjp`Y$eb1eEHner6yHzWjROi88mK+RA7_Oe1@oG|+CVp7{7oyKLWCxl# zPI$P1YJB9VQOSB8ZG^X@HXeVkY2FS+oY{!AQTZcE$68f--dwsYG`q1y;QIZRt-|A6 z+hy?b4|~^|}p+UwWLEjgX(ZX&1@zbZ0 z(x}GiOzm(obU42=b6L34?j@~ke1cVr-CHJSJ#7+f-s*l|zM8g;75kqro{+idTExQu zuZ%7+k8k|w<51kD(%I-?QnTFhF6ISa9xuFg)4#v+IW^QwlfFzYG}JdjkYHTii~p`+ zpc|YXGy1*CZjA`zyH_i&70k+u_V1;8(AOgm67}}kr$lw-$p10v_1+zNKtwUBKg&Q|ot^6%w!kmYDH1O$81Ej*zU}zTV z6(`Auh~ts(Sw<&J$Vh+ifV}z_Z{FlAwlFa@#gqK8qT+q&3*BQ#G|^UwcnCNPdMP}E z#1#<~bBnJqzqCRxm?$3fJtHoLG;UDfzo7h3z>g{M2i!Xrhwfp#@3Ca_Ee$tn;kZm; zt4XAg@P&}mw6;TtN+PezZ`UrRJsJ_&nt}Rh6^~a~9BZR*RGc``C#`Q!gFQ|SN4URL zrlc$ub9J8YJ0f`HFEKfAsw2LaaeoMS{)COqykuKmk!#{; zQ!(?{qfEa>-kC}{@ptdvcaYgyOj_zOa%88fWBL=i=;rITE*O>^C-auoHf5AZWSKPR z44OiJA@f(iGfFG*`|CrJ?nYBiD2DVbXGhs#Q9UVkBpwri1j#E66H8h02 z{nIGnUzC+ek_qZ0@f09I0{Tlz@;bZ>G?hbKLi4|KI}cy+u064WJ@oWcLG4fm(g=uJ z4(5*P^$v&~!(cjb$w2C3VZSG$Io=)9u~1!Iym(O>u2SZMfmci03XI@9Fzn_nampc;xRi>bz2- zswq0eG9-Zlj8jf6agVH*os$!2d&z6r^Q(0QQiPgH<$kCmVMYu%lqvW2)Rq66;UFY7 zYX{8fF4&>qDJ+_cbd&d!!R94>=Q6cRLZQ$KGKJXy zlvgwsU_7h~nHDFhgP0ejNrCnjK2|0ibQ-BR{FLA>meXe}J@Q*! 
z=Qnihald`Dd^@A}v+F*ES+~-9+$V1Bwv(MU99f(AvxaVt8%sp_2s!a*aN&dRBQn{E z`+_}4H6I?{i|&Sk5Wd|q+BDwYg)EY#OF!)G@3%ed`Kwo=KH-d~O(3_LFN!Dj<=l_k zQ)(4Oh<+4VsBWshe<$f{nO-mmfohn4q2G4-$O#Fi6&9Mkd!vdyno+_7r)_@juE)fX(8p=S{KL5`3ctT7>Pnr# z=oaD)?n$3Wj|wwEpNJ4e<9~2C)D4yb?ot^`iC91U*;2jWn2>>CEEg=c276Y%KIq8a zgZL4vC#H;A?`o-buyed6m@Fr-h?D+{V|jhfJb}97!Gm1@PiV6@vJjwmfDSpM_&f`Q zAlSd34#b;10q3h(4L}>_+x5=Y2e2YI~#Awqi39TXmR&$ z-FO=G)v_F*BmWq4h|T~L=!$#?-xwX4-kMJNv<@IRXd-k=yjk*pkX5J2TI^*mda*U^ z+lOv}`;5Q~kQXO-)oH#fMyff6Q}IQ*c?( z${se&IY5C!U*0h~hmMZV_b*e`SFT(c=aE1+3bV8BJQv@vx*c5Wd@J;}DKk93B=tjn zi*~v1bK-mZMSV!=hENVR%qXzv-2qObb%qxvQm&x~R24#o6F?(u>3l z*ELUk=e4Nu40cSZxV6x{XN!X7oASq{4ee#W!K>oY+K#1xSDd5l4E9tZH)EKSXM3WB zwzO1(_wyk-cS^;U73!7kPYpA5bIdAutvufJVduQw_JISf65eSVHlFU;DnIkFW!LRW>>u;FsYyN;RR1U ztXHd>E>Wwv+V;wH<4m0%BS%~xCjFjazw&^Y*A5yuR%~Nc2;~DoSoiRXX3n1dlr~(w z`Jr-yBFy9E^hY5_lYRZHa`S@{RR->8q{h~(v9iDUQiIdr;@{#({ zr>|3@=rA0P+qQ&uxPh9lbCvF#IfXIl+n+Z-5%1DR)7(C}$C6fO!!s(L@8~;`zVnju z7eO_$@s>E2NLhp%a?jmxa3Qbu0GX+R@^_?m1=AtzXaGlU2^Cih7o_ z<%Lh>y9w^De^>lid@Mq*V0X@i=*sEMv{rlM-LC)ssOI>D=pOkY{T{g+Y~O+&WFbmE zLnh*_G(FsdwjRk?O3E=s+~X5MJJt64%I?afh%2(ZX01eY#x_$M;&J|t{dK4KgvsrP zX~#KREsC`oA9uLis9By{w-=fk%8m1wzFj(P=KQ~o$N2#5fad5PVfcd47M z6-WfS@ps2}H{dXlIQxu@);`xB#@GS;)1K=bYB6Ta71Ix%Jyq)%o_8LX{QgzgBC-sJ zf)ldwFlgjuS+sJm!)*(^w!%XvsguDTz@wDH!YhinZVOQy_Tbg1eZycB%n^>`X0boA)a!0#c0=j$+_huQN~V1CPN z?my$0j&@0E?cN*(1`RmMZoOkxv`!ph1&
    CB5t0Wn1;Ni>X3?ql)_CKU)bApArcg`9&>`yuXG zQDQ?sN4PuyWB^^l-+v!TP;uPbm{3h+GZ@1hh@(MLf}=dTr4h?uVv_>_0wY9Rz$~=<{3^Pz1okg^L0IpR zxSBS=4%i1J5D_8@O+d>^k-*$m^j84ig4_WlQ?M|7i&78=Jcm)#2DrB(Y(nR2NZ@h* zVXJJyI!^){tn38X1w-mOnnJ^Y7QOnALLug>CrnyQSq>D|$y$bbDvQWztTH>xg1kK6 zX+*MpY}#z}h>!jNt$slADvl2|xR+9Le@}AcQ_0*N)O5O3QMddDt~hQOf~_MD{vzZg zZMN5{Lx~R{5@x4TkN^RgMl3mWQwM;nO1nmVUvx+YQ9Dg{91y~v3D@WA_9GtLwRAQ|`;h2ZR+Z(JD z1pI)S1Va^b29E)&)D#3gQ!qv9`|-p3gE*e_O1n=Y`r5m96VgzC)x%FsAR~j=n4w~M z_B0MhRzP&$!I`3^j*22`;w0-D4VB7I!a!#saKMdLR->opAAU&yH-{mSSLMLvrf z4(@a+*xJ+67+TP|Ao~Yt(??ksWRkS4r8z0W4H<2^9yBbpe+mi0@t>9^@YX1tVjFZaPn~+r+JLw>y z8x|!)n&tVJ$Ye<~fqk$D)X5uT4i`-s9vNxC7EZb@V7UXH)CXcZb{SL}N#Tp0YBG5!4a72$pP8 ziWwfF1t1|qZo1BtK`Rw2l#c z8PpiCmIj7J3e&K#AcackFMz2mVg8X?zZHfPwg;DIW76OQfmsy2@Pv49Vz-;XT!usq z^Lq!NL_%pH7SKea1WpBvy|NWWJ8YDXFe^Ai&an(R2QRwETr0|pshgb&Jlqk`z|MG3^u?~0F1Dw0g}Pg z!{!QMH5Tc|NqL}(P5%0IGNB?fGZR$~0*{(%UdJV7eE+^yP}KvpDpaBXsi8kjjJ3gx z|7sX!0e_bAN9_VWBaeTOj|5ZATY6;!rNIgVvCoi4nNR&K>)YTQp>&P@&l0!YIU|Rrq5e&aIFeQ9zh74KdWZ12zbdses?;;#Kj(=c4)x|XNiJ@^6@qt* z9jB=OsgB}jhXaLN*I)hqLx36MEE=b6!7z>cfSHtkg3Hvem4@z?LDvLE~1Bf4fwcO??jsYJRR2a+$>s?mv@ zA3l+MH!S4a>Z2+F-xi#mw$A-cI9$1%vMv6njpMIQqr}*OqN2@rx^wderEd8-PJQJT zLPHx9|KX>35@PiOPHO zJZJTz#0Rmwh3c=)h0qo5WGVI#@Dus>gNc--j=7;MCzt&%*mCphc+UR)`COj9K#YEm z*s7)@1#f9jzNy%mt}4ka#29lbRkKdj`)va6L)AkpH}qLI9yt_rOZB+F0GsB)MvG6I zOx#}R1?F~9MZCAHy7$qu@7a# z>o`iZotMQ}giMQ1{;lc_48hHw89LByYky^Y*@4o7u!fn?@9Gw4hCf7@! zRgK#_s9#Sbx=(|ahoAD`D}VLojrrF&ogVw0KQGhO-Mx?JB_)FRm>V~O3JVJl=ls5? zP*_xylAC)~Sz4VKA;Oa#sO9ANT6_&pqnSq{e};yx9*_O{v;O{yhK7b>$lZ@ls%16z z^h7^=D1bbW7g6ZV+#PH0BqUtBc{3O&w9@Z@tx?m{)5&>xOrml0*tI6dH<05+1O)B+ z;Axe{b|)r1lZ{r0A7}}Vg9QQdi~481DuD>kJUl&9N=n#~(WWpI_x`+GKd6K#31pvv zp10;IAiVnc_`L7#j%4Cff6tmiU)ZSQXl(XZL%lyFE zrx;g~xv$Sko|hIde3hTI=tpj?7Ngo#r&c*J-HqXGIy9-x_SRzFX*4HI>3D(}CCatE zt+wmP^2q1sZ4h~qIx5*9%MtfuZgCQ+2f(L3K%>pm!*JNW49R&Fy2R0ZDJd-M1AU`P zuVRW7)CY!AU968-7bRt6EYZnn{+mo?IBv|24UPjaG>;_S4nrd&tK7^7K7hFwu-o^? zVKit`Eq}(Inw!h8|NJ=do(wv$jg-^ZqO$m%S_o&&AHX#ED)e02**?G2U6KMhjWCcH z086_XLHwD8Y5TVlaOtUo0($prR~MI;NSuNIgj4<|)qE0PC*!uRG?{R%j7I*!zGcgn zJh7eq=;k0vs^gN9l2wC~h`2Uj&%8PbsjK*BV=lt6Bn-qfv~EU*;We69KLJvW!si}d zjAG`HgMt^-Rts@|)Gxc~Y22x$kkgC!4(^%MmAzMy@*|_{XGqzxmUPbkyWIO1#ascf zkW}`}jf{*CGmh4d4oe^xcf=zJhD6c#NO5^5@jT&jEoTC>B8D-CM>v6dpZxG44GG{- zJiiJQC2{}x4RDO#duD(-=jk_2@V*%RDz_3FWRL(z3s&G;Twu;_0HI0wZZ)_K+k}MZ z1+@~ZIx7EoLN5MtLWCJchos6!u9eg&C`ySE6x_Cf%D;=1+IUoWh#npugj`6tvE(W+ z>d9A+jg8GCZ;3)8-4AA1DWXrd(Ki>lKA<3YKavL;kM#S7IAW!_6 zexyhiD3JGd;u$F*lIchPVK3wk-Q<0X{ac%@W8s~m{)ZNnz<8_1oCID<>h5E!f+p-z`%u+ z!3a-VPmdk6A>w;Os3v&couK1_Z{>yziga2h-PWiAD2PK0$wr8rW&+BOH0WKBA=hAR zQ=&U7dMaHUb2L?Pci%H!`Pa|L{jC%U1mWu*i0qv7RQE!JO2kB$WAm?Db3hVaL-N@~ z1z*mdo*ww4aFYIGs5AGIJQh@6tnib#Y*s7`FA-k_KEr;blv)cVeo}y}2+0MhOJeYk`Oc_x`Mh|JfN(%Sc*B&Y1hc zs5optW@pb)R3T9zEMqy8tI7AF97DMFMjzm_<4$faY(2KX@}eI6;Be4b%?Jho#?}+C z6p2L`;!z^V4*jrg&@Bbj zyFrq}p%_iH)L4$p(D{ihAUJ;6aYg@$psi-uL`dFDI)CHN5k@Q)Z?gSh{HvtSVDF*z z_LG6d9C5DmLO;Sha6;jz&Mx!GZ55VKV(3i#YoL>_CwZg)(%{z8ud%1az++q*KVM1c zF$2r9WN-A@A^St0(dc};$FczvkK=jEpo221?>8TKq)ZP+Zvbc=$2Z^a7^wpe7nqYH ziV7=?X`oL*D}pqvJvcA0v1zk)P8%6T;VMl&+(Q<)1wn-udgm#p4zE$1r}VjSp&45^ zv8n?6fl;D?jx%!H4B(-`>1iESJE5FXDniR{x+@<^Iwc_-u(P)h!1to45rv%*;~`k? zJ@x~4E4fr;l<@W6?6=t6i}ruCA|fIr0VZ{Sq9Dx7q$kA@G^c|i>XVC1(>+B}oTeab z1G_kJ&Fe7`Nqqin;w8r&wN;UuA_kBEG!m$Qm?6ja0wqQ{Wi5)yb}laJj*gCY7QJNP zWCe92b^%k0lDG<0%ogRQ6cn)hUFfKsNa$UnA?yzgjWysXtX9(|+Y#VwbcG?sbS&Z6 z4Tzt_1lW4XkTzlk+bSz7OM)S+R$v;fw{s6*!jd}2Jm6fgBcdR?B04UY5-z0h^B5$N#CeXu_#hi! 
zLqpd4_vtGIu@?{?cR4CDd;OO7!fQL~%NHvn03`8XGI0AZomm>vP^>1S8=>Q?{LL8` zhFf=%=CH`UQc@^kKu1D6tf(mb1eG3@KDlvqzA)eJ-3;JZk=cv2;0z$K_q7>-9@Y@2 zDJaq3ymzs9u@#3{w-@UyHY11xuYwtzhVThmLNJ3>aNo#@k=~FX-jFyA{t=+D1wx!- zj(bZyNN^!4${eK=mluKW5hw;zip*?sf#@-hJ_Up|IbXNiFMxpm2!XKOV<}DP`~iUn zIE!1*g+dwk6ICzb_eO%GUd;0+V9XI#3uJ*B2zU^2m$L0f1ROj#@Yh6Sa?_6{Bi%TA zR<@RKPD92v(^WWSI5oXMWV1EG)flwtKUmnR$Ihiaeag|h_<0WOUGkjKu#z6z_sjE@ zi^fELK-h=?2GQm-{V?@wXfQ(mLTXZq&5pF7jg4hP`Gb`NU}sQ6>4&%8cHF7=?d_p( zIU#L_iNBu83_>OcXaFu#Z=@hC23MMajJf)AB_$=1Ed$|G%&l9jh7zNs3IsXQ1VS`A zY~rxkGntwlZyN1m~8-Eq32o zs~syZ_p>P(}8EL0e+2E;$Ehybb;;LQDCA#{0iDJ2Qn#AD8M3z^9hl2S3<@^ zI>3@p8T%a8a1D%6q@7wxvLOo#KA>pUV65Pt`TF{%a4c7aSib<1o5tyBcWY}f3MCss zbgaUr-hcKfsE|R}3wdG3J2Nvg^z$ca!;G+@8SlgjUNbT?fRh9HI$x4w6hzpR`eh~U zAHRN+B?Pffh};XC>Dk%TiVAKd11YkIg0U)wc8_hS6D%nzGKbL~q?iXRE%_)&r`Mq$ zKa>E!4n5C){8$OxcCtC*KqP=c)`;1?zQ`0Ogare!hCu684B13EZ)Pm^AYMj%(??3u zhv0f>Y^)AN266ij{6N$(Ah5f>7@RbFb8{!A4c#&YotD_Nm)zb9v%(Nsf#X*kxFPj*3!%3*`ZQ~Qw5{P_!H)u}Ed+{lDN;YLmF z8R}}28o`jqEjBDKqRz3Df7LvTL(^;n*nxEQWG3=bjYqdSHBb7&8 zsvCV2{3;dKmT*-dEa{=s+&t20IsH$nn(Q-Rt5BCP$y>1)E34=!w- zv25$(og|@(laq(X$xHo*kmMs1Kz6^UPbCS-IO<@CiVeFq2B@kdZpSz}4v_N+6-g$e z>~i?HcHa4$SM=#KvIyuBR2&`2ax(C~Erqa1!F2b1%`Z(s7BRenax2NegBrdn`}zI3 zROCvM$G!cZ8AIvo-K=tG&JFuT{;`l1xnbS~QDYA;Y&wG{S#w?>_s6 z41tZJyXj9w=otk+RHB*W!a-?yY6UVLeU4CjZy z6qc0x*HS-kR*JLBxwkQV(ZF%%Zt@TqnYW9|(yzZ4)$QD6;e9$~bvTVBE&08&Meu=n zn`-+4)s6Yn70l$l{2Q)06r|Jj;Wwdf`RwFPVZYYB^ggH?Jla=qg`7z+t$Cktl!r=i zy1Cjbb?5bBLhD|d(yyFkP~t%k4_>NTOGzl6Nzq8J>%U9Flo+WF=77#0OgU<6075#;>oBCBO< zE6B;o33Db|gJQXNibe_c*)zKXdp4o>ldfET-n?AqPZg!ZxDT}y22^)0Y#iPHy(V$s z)I~>K0V9=zuisn`PMhIgs+^OZ&`e@rIeTL_?Pg7792C0|aaHy@zh@Wn@c|U05gG&H zQRn{^?Q!0OHQn~nMI-rq-IBd&GO;-Ka6XgDK!tl$=Utc&opn+S)iOH~GkX2~Ajb|V ztcH+x4)i=thpZMc301(Qq~YtYpW1Qhx^QoN5?h?!HWF`aZqIfsrHSRKqM9ff1Ztr% zEVIm1M!#RJRo|d~O-^t+;YYpr()ZXKJUzPIC(L?{kK&?YsF5$5LGeQL25?sf$D3#- zNI(UbD&jC;B&t{bjDr<3ge2RLGY1Rk`J!HB=ipF{5}2f5wm-B;uNoU`(K=|L$G57C z@5JtR;R^awuhbo~T4sKPZ^xsd;lT1uL~BG-OlSfmRfH+>B)rM`Q6zo|LP_7G7bao) zB&yrshOEcJxc=*NWLC&OZ+uJhRnvZgv}yFy(>_IsPtBQgY$p2u=vj8fP@--X5IB`e^s#MxD9urp#`$hu) zqrA-~Y7_=hKXN!!S^7Z*Ao(MLG#Jn^yPlK+XAC28mF}A38&@Os#6F0m04{9`=#7A6 z1%U}|WiJCP5D|s|#hJgT03k39wsw?}Z1@{&4C*4<4~6wqWJN{wykB)KhEgr<;-@v2 z@$`AxNjgkq8|0L$Lc-Pm&Y1oUfpvH)&W=KPpcn-Ngxz)6-<|*uAksUc|ATAsmZ@(! zrj^`K!d9UjY)CkZ-~fu^b#U8sL+CB|rE3c3i8KKoqvFv1VhADcpf`v`i;~Mw6>s}; zd%3&!PbhRha*`Y)NX+Ly&sAK0eDaY6>>xTppg^7w^z-8P{gaZK$Kt~6o=r$-GgasY zJIN`CL^KJJHG_E_t{Dj?(x$Tq{9q=5Sb%t_BmN=VA&QK0(8*s)a4DlBWFSZ&uM+SN zApxq6RU##y0>6E;DJ&@&gonus-0sP@u6`^8=s_Cs1e_m0KXu(-4kNcfjfadP1f7Id zmbf~F`D!YF_!PT&dlQah-1m&%v@$oAqPw!AuQ4)ZKFgs9oBpsUN zet)?^Owvgf=>EN_U#9Z+dbrXcZd&s5-|>l;k+uqf4^R~fF|>{*P3n=gM?Q#IJ-VM! 
zZF9_(VOgOY&v}iHwU3*r*13_Ddha^|AFe=y7?YRiOsV)J&RkJmo6!9+3>=w1Qd%R{F$B5=aQ5hyDwBj*XP4r(A)6cm6M zVMs$PJgP~)OlB$ytg@uzH7cDJ@P9yxRD);KckbC_($9-)P8U19NKv`$HuXgl{xEyY zWyEM-J`4SMmo@6EF`RVZwH-~Ocqu7XfFpplKzT8Hh}m z;1MDzEp35Yi4eJ-h&(XkiLWFyMKxrdxp|Wsmxn|Q&hqpiYDtm7f~zILCt0EiCWe~1 zz-Y$m4kP#VNca&9D|^3Ch`V`{#K5Vzb+GIJQ#| zObpxj00fL`*i#x8{ZQ`ts_n^JSheAbQy8NY|Md{(?Dmt>Q$yFt#u2uK*;onpYZdSm zf8mbkHkF^@-uF3=6;?c%jjBI*JDbCCqhiA*1Dr4ap@YoZ4l|c6teRLOn#jP)Pd7BA z*Qd9!)JoIy=!Z|9LhZnxa5md^ISWPb4yt;?4T_ zMK!;dwI3Mr)N;JuaY^ppyYCFtR2bvy_H4rmB`RrO`XtH0qVe>}_ZEdj`x#^2d73@U zmtMLkT2b8iMWYa-h3p4&2x~2i^sF>97KQ#9AEewFkCOtGKl__+5tBdTaKFD%_0g^9 zJmG8i9cr6&^HZ(Wx;i-1DFcghb93K~%q&orj&iQ-&;dIrB(4W^KiqEFLASN3Xfd>% z@A&FXw|V;?{@7WXUlmfNHi%^dwIkqz3Lueq%}MOSVg-?GP0Z@tj**|pV-_t6J(419 z!eXzg&W9en|DH;|W{r`nv$ONh@$uPjU%x*6=Vm9WP>Lz&y`200um^rF&b9I!d06QBXwG< zA%eZ#?zTveukX8iioLB}UBe7be@MI+=DuTX-*pN-=9u}GG^b8mH<42D7j9_aP*_~N z07dwnZN(0hN8VKRPnGSRykcOmxubP3V;74bHN_6()}HlOx4NE7?j>`Khq?+$-D!NL ziSLNJ(vh^s;-~d=TZ4pDCH;Nk$41RI3TN&Aaaj9i8F)Zj?46z0uj???TKS}wTQe6I z`m%MHt7#9{h=#v>ZD^qVX0kNeZiiAPRlCDx^qN(lCdO+pFOyrE%uTFT(U%NkfXX7~qvOE96)?DBO+O>bxE{tD+rPW9Hmw=YM3h#He0ru_i8>Lvoh>q76-_t7W6GuPuM_ za$IZp!)1!fJ(nYw9j;|Q*iu*+(&u8NPW_!d1>NK;)6&u&fg|}Hp|%LI1Y+< zHO_1c{l>*=1HWOKch+r1X56A1OxQ1d#-9biwngXlw!BGJh_di3vhb@hZL`mI$H)&} zXPPCfW?krWBX;kp3kQm*AI8p4XAVcwYBk?lN8xMkP%Ia>eY)6NirMD+i_B*J-JTo; zZq$6*_BV3{Pqd|MQ1{*FyQkGP>Xyu#p}2#R!<#-@oYH8mbCszGnQi%_T5Ov6uDP(c z$v4&6zffUMUZH>b(%W_C7iUaZdi>OquiW-^V9M0dI~DBUee7*{rr~E@5$)^_nVI;` zuh$neX;yDf`ekNl{4#z0^Zfo*JcIPEj$Re9w$h)3xJ0d%Wqq%mnmbwDb*j4Sy3QPR zqkO))<$-s=7#NisseQZ*kKLy+EmFPTo-$jJ(Zb8CfG-dRDpG>}iYolcS`(d^s zAJ@Q-(uMs4r$%Ti_W69RUhU}B#bC+c>ZNEzG?E0|qiz|2QSQPc0 z+Zwuu4@S@$@a=Z=;)>YSd$7d*tNllo$L#Hlg27pwB215Fn>e427u7m?9lR9Ft{bR- z@KSq1%GYSt$ydsQ7t8NUfr(2>G4wZOk&IhemaH?T#k~O zhimyV+m4AWPEYZ^)a1#Wt)OWcmT)-s*swG-Qj_mpmdU{YZQ7v2ZMt5gPNlZeRo3sqYe60C4rk@PAZWD5G zkneZ+wtanNrR@D}7kTyP<*kwymA;$k>h5A>nob=YQr#l2T5+jxIPB%2$gW&Z-yPkq zHaA*k_siSp-cwyCwMc2Jut$A)@I1}f_wx_^KB_J>iE#?1U1Vy0Bxs{%X`St3lYM0T z!?4gPZ=YEWeup$B!{iQwJ>r=%&4-V@4;}0%H9cOX{M}?&XZ^J59?1`vX4mNLKDIof zbyL5!d13H;{*^6@miug0Zt5c~{#gx2ECm)_u8%g19UJ|*ZvU)f%BIATaHahw25I^R z+15{^ic@r)Hrt$wG-;`eh|%#s7*(F!pRUMn<9ejjt<9%=_v0?M-$k25joLoz^D$)# zn!J^Jp(}riEdLw}!TYCMe^)&SY=6b)C}6>AcWmOs1-rKvmf21&$<{%#@3d2&I|P*d z(k;*Lw~*G?6R{{7XL1rMrBfDj{t_+4lBZj_Jv`meXGHMxC7Uci36;dCh5f z#S5;5=|7j7(GLqY-ejTkiqqR#%|dOXj`4FD{?041*=*N+TpVX8GUW67l?{%)qFSXV zdMQLwa*8#{(Z@0)zHrS8S>8NdpJk~Fxk*n#W;IH#=q$b+XUdc0X?H(umw4SU)aFnb^@e0jR&Hr?m7m}6c@)%7r7E{a zB(oaiRgE5>>uEF*3e+jdI@#dLk;LVB(AkUPW%~0JKb-`1D&527p%*NfV%O-;y*!aj zj-t9Tt$xhsbLWJd2-$AlQfH^1*H!JS-loVreRsN>Ip62AZnjK~lXcKUNAj!IUoSI8 zs7%AO_Aq5`w+WhQyQ@E=Wv;72Z`SW4$K<{BvP5BT*|Lg#qut+$;@%B~_hvPwWGw3s z+so+c9;@wIzfUakY8y*7uYG~z!%cJsnXZOj>6h%xJSOfK*3zubihZ&^;wY9(k=Q5u^eqmvPH z`9rp6!|?`1>^eQVrGh#mCjBhs8>1I*XP+{w z{eNEWbGw>yN>F=IZP8M%epk2?{n(@BrKU{^vqA#ix;6BwD(f4mv$eef?eqhhpAP9) zW!(+aXpgXynkwbQBgu+oY;(}q!Y35+HFGgl&`d4%lf)seuS__*U_0}Nm#g2s4R)F^ zI_z}E*~N_CB>QE31g*Z1`Ra$(s=--3eKsOjg(Y(y=Z<)vq;%Hi!ffMNeL{afFBRL9 z-Bx2)RshA&HM-^3eexct_tOkNRi_M#G?Zt^v50A>-}zpaJ+D{dnwQngfNAub+aa6M z*KaU2KOkb(_E~SnvGn+QSB+DRp_QBu+J?MSSN7cUyag{~aWAPuBi`E4wU14lvb7V2 z6}XI9@>(RCJ3k)o5f?GHZ8tBvq!alz*{qGZz=vDEAeuo{&6%R(SP zM&5{9dq0|WH6C`i%1#na(VgW+=RKqPVTW{dMW7Xg07wXJhjy>}?V+p4AoiQJ60=tIJohUfPI>r&}D-J^<|d4 z?u;iokD|K@d1s@ChuWgmws>}ivz{Jp)s$Q;8jklCwnNDHW;WsDVBvC*bK`u{zUe7#eMs(X=^Z-VY{^qtUzeo{Zr{}- zC)TOPU%qY0=$P29KzpIUHesh1SB$T;oslR!Afg|wylz@{*MLh0bCwSeHLrdA{i3F~ zDJwfXzR9N0N7-Jd19n?DMt8CPh)aK4-N}@X3(T~TW)5l{kXqXOo-IrLuAb-q z3sM`Bsv_-rw(Os7g+`Vm_ 
z+OeC5*_`dDc&49JgS%nrotb-NKJ(w{j815K!@^psTbp^~uKu-=JF2^yEOy$cnX1(a z4Vsg~U4Ny|n%{5#oN-lw`=jz)L*J_rG11=;)8sw5uGAG?%Hesse?TW`O_DqI5Kpxy zD}hlMTPo*4)y;;{4~iBXzH;06mYg+a7o?6)dhj?Z_`dPg3_I`V61p#gt5?bjTD-h= z*DU>NN&=Z=I#ll<@f`RGJDb_)aS$M;vY0j_W4T($e2s3?n2+(GfLYMqF=vkxjSgS; z?KH~viQzaz&N@b)m(zvu_1ZPO9jZBzMg=F-!|Z4a4%TLwRIu>sc7v)W$=1NO^wB}@ER%$pAgSH|MtqOqJ*FKlUBAtA64G*thI;t542@n zJ;RiDz#}C7RxzKkQq_-hstcL4Mlz9)v`yDE`iF3|X#0V=^Um z2|xrZ;WH|t8$NM7;1-rG%o-iBESh|Y9*C@{ViQ699EB-YnKYX+tE^f2b zwAthyN7vS;%MPj|U92ryK2ckEbd}qio~t%Q(w=TMb-Xrvo7~)5`+=|7MctRv={vVg zGCDLf7oHnS(I=42D>Hs3>YS4^PE5y`*oB%oc5`LFd>@~}YH4|np);MuzS2TR&~SED z`+{`q*s<1kjcz5T_vOa6R?IrK^`Ey{s@!9-Yl64*XENgxCFU1g9GY7bhx~b?C^24@ z!*r_aEcqvk^1>RT*M{>C@Qy{ceT-arf;G1euC_B;?G&TRf_~A*Qa}DveiKREDaoQ- zOBF6M&THVj?fQjbrDI?#0J9Fzr+*#Hl;c2iLyYT)FFA2qjz<10ul2TKku@(qohR#7 zjH<_qr|=qu(n%FfP5eA*dDhz6 z#of-;V~cHD5`MAWLrveq%G|?R7O&t3g@l-dq==Y=h`8h_F$q~IFXC?D(&5@UVBbcK0xMcKd&PM&_R{dCOU=fzNCtpRx9` zw~0&7=yp7zw^&Om#@fmhBB}M63j;cLq~GrVam^j zxg-Umn`L$$dhWN8UGd7VR$+uqj>If#eNR$CXjw1TNWCkB9ZZ+YpAhY&drzL>}%MAZ5zQ*g0qBOy#MTY;Y-zzC%b z45{t`bT5!HpB(9{Pm`5T~S)VquN&VWRu%cdl_OM}f5lDI%QC(!Kpx~{AFW9L1O*tm~XZZwo0 zXDmUob0vY2IRQrqkrW!u&dKjD%>Cey(J@RF>vTVVIOuXM?;>SZ*i)=s_<-VXCIt=l zJEj@bLFkikv~J0J2Et!>>(1@Bc23oZ@pj}|6avT32N6-vA~UrKX0+RB=3#HicN^*# zLkr)Q5_3msyr7ojZLhE8)Oghvjj*h011977KsQ^yP2rOMcQa3<&wNV}ueMQ)gUH7F z^(srBPH;eA2b!#)$QGlKAD*5b{E4Tvqz(;@p3Vhvo5XM7wbg9Nj|%JxJqiZ9CZArc zep|2^%45raMqp`zKahKn*qmP$sojz4Y)WoI0Q-Ia{pIqOO5$!^$aSUH<@z10zst+f z@#4VaOEYfZ>)FMf;8kvuQNKn63)p_VBjvUTcZ4iHgs)8b0Ixg>4y21Ah*X0IO`8IrLn;iZGGJtca5a%Zfk=j|OVI8?J)2;bU?cssj@Y^o8vRI) zC}(;07Z5ig9-FMeBov|1v#9((p&G`^%9#o%hgXYn zkmw{@n+iHZafj&&@g(p{eg6HuTSA}^r!v^OP@)`*2i~kejtwzZlDZ)C0J9lZFYt@_ zVxigrQ8N*Ln7)WZLGA&9D;Q)3OKC`^F)|LMiII{fd|FUc!?Gr=dYqb3izY_*^kTh2 zHOg}&h7+E`2;n9&@&Jb87lBXtu}lM;n_U;)-B^9$9$T*uoDivoQuWlH=u4qap>l(& zB5;K{^&yTKj?n9}YoQ)uWJ(d1@~#p0#w^U)Tp{hD?TMO_e9|W*0#wKeQWX42{A1ae z;@b)EQj{gPCHy7kztxmE4?~g~lcJKEloOPrlfG4GR_I!_{f7HdB{P#Eki0OKW~%6n znJJm4pf9nmU{7X1<&i9?!bStabdcer%w~yij8m577VS{RHd)xS0SO^q-s(ruS%~>ufbdHu2rL$TW$YiT6w>GM!oKvV3EJ9pHN6C zvo^U5xuQwbuzf579)A{vJpGiAeBM8ZDZ8vPcH;T3S=_PWJHP+Ux6V(`cD6)ngS2gH zY-?C*dbCfqUstkLzN|#8%s10F$J=-u8S<#}*yifyn(@H6+8_Di8L&)&}O9j6_E4%cU2OC%Q&{!Gm26fPFr3lT52&BxE>SN2K2$N1+5r3IyfFBbgS z3>?rC{WClL733Jc#WBP&1Ws;JY)GCs=UqH6;Wx}P>_5CbTotQ`bDuRU=PL(ARVAM# zpEuufDChmfkFY0o-ND)t&zO#R&3eWDX?b#mVW4`cdio@Y`&W)po>A{g_pk1a!JlM< z#--x1zw~4qDTi1loNM;DC+^ij>eA{6HF(wD)owM*AaM{o=-#fJqnAU)w$iqC72OWE zmAe(+u5PK&z1CgJ9m_raRLO_aC+Jn;IW93Yk%&Buyh>u-`J0m=4<^qaE?4d&u5ey# zUJkBa0yAbhvp>;F`x1UG&TivwD`&0_&HoN&=YADuOZ2(7y8PqO;4&ZehXX)x@OvnNjbJ$JOU&;=6#EE7K|CT*Q8eU`QFr6-ahS3}+05_B1~vlw%(;3m%~-NQZL zJ=hHQnd(S!GS|~LX@z3CB*ZGTxuki`%79BLhi+Pkhqs5NCx2qQcCUBy@lyTT`*Hmx z^~C~r9haWfo27?U!|cxd(LB}S7EUu5DOh8fbUC|eV_$6VWD<8$e!ONvWn6ZGm+TMb zKUf`Ve8oJD4pCDkIy5(h%bZ4YnLaMLZgS$W{Dck7f=`0Gs}vkb1{8W>0%2HT#HCaU z{9N)TS&rJPu3d-U%LM7+>7MMM1wwd>%}Ng^y;!@&xC%1E`_ zs>3Q-jXphAtEn*O^s6ZwS{q6m-o>yw8(oJ6|HmgL{6>zyZ=p0v7SB|%#wPD%&bn-hrcWmJFsh!VmD7Ee!D{M_q>os&~ zSFKf5dElSvtA~K3tiA6`&N>=74INBsL{C9Hxt)!EhWVx&*RNL34n(lLv(5=l^>+z- zh2ulbLqBe%UT>b>!mGolhj)kL$O|R4v%0g8iQV`*uQLuCqIXtC(sK)PG#!oWJIcK_ zOAbsoDkfFpb*NhY^75bE+=4PdcdSMh)A>j|K2HSKy4POk*CSs%^|_oAof@{1R)0Kk zEVd5yb-nqwuhsOtH9qeR(-UcA@eI4?v>*B0{%Hxcv8ZdRGw4L~D&8?4oLX&Dc5qw& zw|3T%?6beHHrSPaKGmbxZtx`ZD*Pxq6rmQd^rp0U2=eNtFQ__I`djQ1-w|)XHx_X9 z!)HL{X#UEAYNpaB$0zO<`c~gd;LYnEVeb2kA7y~`i`L`se|v>}FK^bz#EToRGB4H_ zw@IAd4(`2fUaKsq{1+E`>s>SFvVsSGOSg%amw(Gj6lVk!Uqzl8&#F!rOC5|h&QSg# zAxCo=$DOEvgT;Fl@??C%CjS#RYH>##LHg9OeK58G?nI 
zkdYVx3=CP$TUFCl*~o*$(b>V=%GQj;)yvV0bnlIdouL=?vgi=?4Jbk=9m{(g~24(+jI5ijZ|a8`x8QFI?q0YyTDl~&Ni#PA>+1t~p{91MAwjgCc z=->StkNzw1&huq&f?=ktD&ZpX9uCgHe!*k6!FA%nX!Oftn(1kmAd$l7vf9q+XTm%f z1W~y)Z~3Js2Gz5$+SS(_66~@*>6CzU)qsRQxhPlpC<4{jx3s}8|2)Q%I{f(`(*M1` zUcVc_F2v>Z5>xVN6KOjqB-z_M$XfVgpLYHzyI^pAj52_vht_glT`<6S{i3Pf%hCQk zG`B?3AnZ)c3*|sWNV$&e@gQ4-D*vc2W{+~SI4qnJ3xkhx&ka6Qsjk#<>NMgqDwQH` z?Jcr>mH#bzSzS4L1Kav%y8X-kwPj(G08jL`;7Gjj+k5HBzNJgyMc!|lLBCMKl%5{~ z53Wp@@u_p@wb{gG20T&yq>{39$7co}?}OYv(M*JE2)~RFNDrS}3KLY|4~JtNrSVi| zOD2Twcc7RJBthwq`Ghj_nH#=`ASGK8M2YhF62$gk=7 zQNg5M>Cgq`N0#?=w*-s;FDl*i_GrcwV6K!azh;#O7w2ko_bjm&4VHKEganKWNgv@7 z9q%OrdFUlPz3$u+ODw)5J1~bqu1-^wkB~Cu!gJV^7yZ~#kt`FB_WxI(u+_4bLFUqBcy(ez>JoM# zFLV84F`Q!<>TAT`>xhAK-qk^e-+374@(jd`eiSU1&rYR>E<#T$Qxwns-9LX)-e)}V zdGzIPP;k{x`cq#9owI>wI| z#WH^f;d0iVyRS4c&BE9CbB;elpS6gJJX?9!&NfA16z{XytZ2(V_)nforX7AcX(adM zGVSE|dOaW#$?Bw#FEd2fIzWF%*NG(MEzL{`?2Zd%p*%ZY;+PvA_xPbRn*|l%Hrj`` z4VUawNG;dPsM(>)L;tg}(Hv>7O};OsXH=ZNl(@u;?}bzFw@1_IA-`fBg^UznUvBwCh08wy1)NK@$ zckEX|@fU<{V?I1%rjz!OXR>7)DDeoo<1Y(=-&SS<6npBJ)vC0O$7Ua~*FEpsaT>3r&W zqf(^g9acdJ8|q*C@MNC37&+YQoLe`Hp-AVEvu-~}JT+|X-fOPiiA{yO+!&g1*hpo(42Fq`q`o#Kb8myNiS7PM2U#IC`U{2S>+VF6iPI0htFEo&MDwu5S*l%8EBrNVshqM~$V%ZI z-0X-+ksvlg_*^r>LKal~2#y%~0qx9C0f~cev4R)z z_(6}SF7W=6NfISnwaZ{)%pWFYb@s3h#%x$P|PnvSF*6Xv(Y#gLR5&JjMBc)lh0eP7%A}d)Ng2iGNEGsT!$8P1wDhy~P(i57Shsm$3R zCA3Rv=@PyaxNE$SkQHX!GNHmuKG%G1L%jS$z?&%9a44Rg{=4uR!&TE3p6g+= z0;AmOPckdm_cN9JJQc0xiLq6AFAwJD2w((m^vcmkg-#dL@^uDmP^ z`q8%ksvJEiXEE9F8c=b{@sUQ5tDW{b|6aLV0P4?Sb+eTFwhRhs)5R4HS2XqEF$cXl$jthC}4#_SKD+3E+GC7MQba1g% zG$XlW9-|`#qUGjid#tqTQUZnd< z@UC~(p9EH=;TJxw^34m%vu}=tzfn!$C-Lv5if9D>@IYFMT#%6LaqM|nwuB7hqrw$R zeoC!E$PH>2`)Y~b5MGG-37dd5B$Hh3hNUTdY}#cj-E%4l4HXe~CMW@pU^Zxv@#JXe zAB|6lZsMRkT(88ecHt z^y3VuDL5Elx$tg}u*|2pr>Sw~t{dvATq+V|(WdAUgv>Jp-y@2sO+v`il!WqK@hTdX zP6>P=Js9R2#S~1YaNjL<{!WEmU+cHNmNc9(uWBYE&q=`&#ZsnyNfS;UTj3WOD z!P*tsBtgr7qn0AY00r{Yr+p>&YUv0aPU@jbY;16*9+#2H3_&jXq^qV})tW`p17}wv zN0InTWdKq(uOcR|(xR{utG_8FUByJ(ra4zx(#CQ2&u1G6JQ|MD(x_(EbT&zO5{$5Y zsVc?vKwsr{MG7$f?@yr?4S367FMHCEKhYQpet(M)NyeO1&ygjs*sG4O;!0J!BxquH;*T~6WhH6d9YpE_w4YRh>ZsV5O+Fcz$Yjk%%i#=Y4&2no-@?h-e z{0n|0S3UULRU5=vaEYX}B*bGI!-rxQ6k~n`x0%C201k;UjU;TCfF_R`;90Kg@CaiL zVQYAXySLQQ2yxOWJ1tG1fifcIf z+in<1maYXFmuH>GEvz5N(rSZ;sL>VZ(Knu{!A|^^pf~ld5|G&#N-ZRT&c!&$n#}Y` zcf+$tC6vx~TWDH=^o*0X4)S|ar#pn4wNF*GAK0cdgxKw`fESj`DQULBOXd3M1Xh4( zB9e_I{)enMEzXb&Yqu7}?-;DSDp%`X-`l1q9pd)Y{6v@fiv)HhMQ`WIpdJ;>soOd9 zKHdGfS%kGl*2?mu3N_aO2=(?$h z0#q>;XW!QT+9m%ehv9h@>0#aEjAG;96&e2I$@qITi9b@r@hxM*UlQ^QHnNaJDD=(N zrsl)j&w@Z}odT+Z-*cR^og}?BE8mSPMNT?7G96GnU5oNGt$YrGH}L6L|Hk#&lUO0Z zg?2+~hxEAEEp#lzK+iM8(0UT8Mxc)3n=T_!ZMdvwBn#0ZBZ2s%5-I)axcf(~m}sw@ z-dqnh^r`s->6Yra_rHfDx#se-E?mn)jlJRxUJCe|ygYUHv3i~R=&d=R^xg-&s6SHf z(&8e%c%Otx(D$69%kzB@9}tt%$;=(et~s31vbO9|6g6{ zY3B$AMw1w(qT@XD74~HxlRp>{c6X;e?Pf94|L#H0lsaxl&RAZ*zi}dZc4LOg?^iK* zVzs{MrSU?t+C(jfQ}jlwwo6ows(H3t4ZcpA+4qeqURG+J6_N+S1l{I;Kc#B^N{Ee-AHB)!F7!D+iXSFB0K;D5QjFit4t^c${12ukSClpO~A05EAWVO7tSlQj=toxO$r%Z-I*#`7K%UMKe) zemK-oL(vSzsxZvojgqwFli@wuBFiZ~GPs2ln4XZ}=;$mYK0Le)J3Z;9()Yaq6v}4e zjg)muTfWzGl2;3B4RvwT{4>{qs3NeWq5tp82=<=kc_2aC$CqMdvvGIGu5Bia1u9%O z`JUB;>Hi(J;3^GXTPL`<{pa`j<3b10-e!TT*}((r&2|4>xJRoF*4D?_aY^s_pH>`+ z8QlMV<@<|Dl6>?eG0094ai!BZ-+S8${$m)fskQKYKB_{iDiGb%EAe^GeE(gAU(Eul zn4hfR6fVfX=f05jzK1#o=HvH~hOXvcmlNFxTMF(hp&d_`-8~rR)kZ2FGr||2-;NA} z^-5cI&f?5`Pdz+{-sc1?`tEk_Z+i*e>z*H0wf|YZPra{jMxdKJ&z4{#6x_dRsZj>rr-H4cxDo}RoSVJWRJGje=;*_3zY3+pnXcLq%KD@o zGLCG3&ebL3`r7iF;s^qKW-Z+A@+cg~ksoL~4|`n-R$oIZ9P zRg930FfC5ja=lGYtGcu8RJv`mtT}Zvy1J39;%>aHTqDsfy&jBTqgozq_!zJWPCR9U 
z^31eX>+TrlSL29Z&R6GnEPx?v z%tLt9@n3(cse)*)g*Akn8img8I349aNNeu+D3~}cLZqu;Bx6W8bsu@5Rkbz{FN*<5 zkd@9g6h?FFJ%MLB>VM@3MW4aoP%r__)58yL>(khGn#n|xOBnj8u{RDXGYDJSLxNjG zBZv5ZCD2?3-O3McWh3Yzfh*$k&ql=p+Bz5aXDhWK5=X-AtxEx}`iz&~LV2?q&N_&t z_in~upktp&wEp>r+dPZp^TR9S#@fgR)_!#lkPViR!OLfz^9J9S8b%ieeOCX|k@Mqy zgPlu{5%wnfctIS(8n4^?CQuv@o%}nfjnm&+LAE|2ONKH~zCHGe-|%hC)f*fCt8)9( zDSK$Vlvb>ttsXr=X90Earo74=v)W**d1fms`$DBQc9Z{A5L&q*Ys=|WRx6NO#e&t9 zX2W#{6$>b1J*=P5=>6_&qe#2^cLYuUt+=n-5Sw>-GpmI zp6|<=)3on42Pt?e!gzLf6rft*-NTnqOJxE=?pK9NZOiVD)(TB}T%M|0HqRuez(6dax;uN{@ z=#>{0TnltXD0XdMvpt?Z3>pMX^m@J(B zmJPnHR+v$rj*&p)jY7|}tCIv(-Mi{(YA%hUd3gm)$|>^B70i!l)yVTitOZq86j<>$ z7D#_gyoi?;EIL?0=_^ibXlQsPa#@pcfbbT|F^8>ZM?-@XV*yc~-t{F(e?EV{j?d}w zerxHoV5>rwV1jLR--(Tm@G;taF&R0vJOPlerSkI21+~=9;b93?HSzz}-$U8_AuIv{ zIS-Y3O?8DYVD`VMd8Emk}9iqzx=n}HzAlRiT3f`PThr-ojlEd4rS$HCnl zBKgQD?!xYBIVUaxn&_0W|8N~jWS$KX8uIcIPFu@uA&FA(OV`Aqt9_YSjjaxfC2x7y z7ew=0VFDM6R7@ysfz5Lxyo78YHFxvjM>Q-gXgK4rk7@}nrh(5Yo%?I6vTbalLa1Qj z=OzA&?oGoO^`a8ObIr^(*Xhej29cD8Mi8Iwb5>ebl5^vfHT&|Zdmrx)@XFFc_X2hG zd1OBmtBpZ|C$(>fnLQ{UqubV)v2QbJ(w4WPrKN_nil9E`BeMHt@g-eTA|oSR zd-N?_xd;Si3`2!yDlX>;i1DSnB}txD)Ck`hYl_CVY}wQP9J+~#OQ4~l1r2x}+yG-b zIzFzbtsOpeeKOF5%oUg1EpZ%_3^$6L0|Fd}M~#G!SD~dH-vpu^s7B?_P$(-7cz=t* zVa|yhl{z{)((JS6Uu|>n;G}U|tGGn8e_iWwnku%^{=V3|e&3dJ-wISGD>ni14NopJ z85vnmW#Ms zXKq3*5hH8_!G2GfPb+euS+HC^1)(41R?fU5XMoP$;gXm z*G9A~H)(4_c0T`))(aJt;YrbL=K^aBq5=lf&MuvPxQ9ooi`Z|9zNNGY)N-HG`jhzC?nVM3}mno8yvtr24FEyCJY%spXeFRG2(G*C3WK6W_ z{ILAZ>1ujW5n1oslMOT?LI33l@yxcv=N(5buEX7A06K=uTP}Cv1snaI-4$}2$*iob zxCoHh)ONBRu$lb_p_0&(zq!K0AfQ}Lac+Fm(V+)Qtw@Ei(llV z+D50Nqr>6%W=q%i#x0hJEp2X286TG!jwP0S2Zz+`GH1-@bTI#Uv4u#;1J$%F<}1Qy zWK>wK)fAq~>tSrunmErH5g7^V-01Q(Q*P;WDu?gZ)Rl)&(DxlORz5bz^-B8ZuU}~P z4GB3pa&Btug74i~%{Q*l$ONNtxxAV^-t5s3!1|PY#)Kn{GmAFf9u3#GV}BSNnemj5 zXf+Yb_xS@twENT;#BpV9mv@Kgy_uVND^MXx6j_97W6L{0V8d2$j}<{zu!z6H#fH=~ zpcrq)^HuC{m5=#J%Fi)35011mwumMrE$!lvI=|6+kM0DSS2O}?ulstY_&xBgq_m`H z@o;Q>oQ#~GFjrsESQ$OqxtzlAc1E!2WW)>;6SLlBL}8F|%hb#wF(+yA&F^`REix*K z^rx-)Y(Y?{4nt@}1O_2t7__Q}Mw92IF{nmI(Mp>3*3AtOk1G-=9(Z{8ipt6)TOiBF zmIBF(CrYKs(dkMPDZqP$eK_=l^9n= z9W4nIqu z>_>lqxvQC|=#je%)_l*#%bwN7{ii<9hL?@!*QTIeXjO*Q(vkX(Ld|yCT)6zhM2pnB zvb>z8R3UeK(#67g5RUq*4J)-e!2G4RQeuVo`n1RL_%Ifl zF{P@eR%n&=&h#5lIJ9g59)qh#8rTpse?)kj>Ux{qlX(+jAC8@?S|Dwzgf(nXD8F$0 zd-(bAkSHG;D|w8}a_CyxLzznX35;)um7dSv(gxW_ss!gpZG3 zUR!NFmoS-JJRHS59Z0+2xtlpBJkLR|wgDOhjtGZVhZ&Pp z9Mv2B;Yj$}=MdjuNmpnY7-D{3UAbYUs!hP2*>_Rb5Kg$3tGDvg+cN^(vTg}P=EvU) zspb59*WW9U{DY_NQCy1hC!t{G6JcpZ>ppMn4Q4LV}-ZM!m z47BE)QmYI(Dmf~QsVR=E>{CYj{cJg^S+=axJJIDoL(7`-^KtDa#q)>Sa*F^c=*Q1l z{@ikGhtY8~GvR-(RKxs~JQiANwS?jzI}RU;sz>@oATL&@t*66ev33MmQ5n+LQXo&q zXY&>>4l&TLZS}se=_wJZq=0i))zt>6ry%tT;uDRH`-Oxs?S|@~1Y3?GFiH;I=Nb;% zN{~6?p(7U!6H{4hD|Ydmr)L&nc1_3WmNk~6DsP46H~FXGaHYIR3PZ411fw)Xb+%yo5OpkG}ZRvN7!^QMk~JI?$%J2+CY&sS>0hes%&jiHSK zn01I#6^`?lCyNWkmY@>P9eXQCU*I66_`Y&T{+2FV^oX>Rr?`76!)`pD*a3 z*7t1@a{0yVbTs@7VDP2Jf}Vd`N5ChpmZ-esPLLp%lM@^BX#Y9*oxIXmr8SV4sdd;P^B?o4KX@>Z0I?$ATbg6XF<>}aRfV_e z1;d%@d@s;JaQ0)wmy50Tpi+0*Xik4(|4-P(&1r`^w_;_$FQ zUT$0eUY3U5vwt3%7SN5txvX)Bmi%SU6XSGFPEO+hfl&1HY=`bco-wp+Muj{9=elJ_0cY6pO315&tVq0Fkgoes7NG|OZ!-Ll zNX9*jVfptLtPVC454sVxsi|%^Vh<}FLPZbitc_XNOM$TeO_0pe?)EeO!4eqY%JiL$ z_O{Gw;ZB3_Tpm{+TOl+9ihZO>{^tyhAyXsheD7O#2N9tmYfbYU+W##zwjNnYC+i?cF4R%XQOZUxh*`-tW+P*h$OmQcers$%A%F3=8 zwiyzQc{UI1#oHD3_$NYH23}rX9vEdF90rx+-Li!VOJ-zr^mcy`ByhVl2CczGgMip&Tl0UD znTWV7p^w-5fG8gXP+18xOsmNp;G^RH{sOXT3|rgZnM%96b5H~x0=Kt~hvNt*y!LZP z0Brssr~uav4GSxzfv?i28#Zom9QgG4tNmXt^&0J8NtCkfPe)ah=-9$+uaKp!|J>S>cMSv<`f6*NU 
zA`B3X$z0y_|F|UnNZ;Enlfc`935WgP(VqMD31FtnKpSJ~dtphYk{`6?xW1ZTQZX=4 z6cvRG3k&1QohDJt{V33%N;_+#^cg(MLU@j!=4lWiV0|IsOd%`mERWqtuAh0(dK()B z^t|>``K~8fcXxL$fDt)Pu`gZ>A~4lEZi(b5Q1j%*0SpzHh;I~Vo?z_{+uyx_F@L;k zb-L10Qc}LWT+ifqA5{RHC_O*mEwjCy3*Z(&vjk$_TToaS^!KlBp6524`E(9kf2-Xp zj({&$x`978bV$%xGWGQ+c|0mEF72}BM|s>}2XP!->6fX|?i_v%rEgL)g<$1t+wne^Q+T^4h7{D06sik5l@+GFE zU;^HOXgDJHW+;ZjX0oEd7BHfI`!-hp7tdDvwdl%9Rdsda7Hw%oMQV0-tg?A&1qI}} zxw(&;f+6w@#G+Tf0O>fOp`(lR`M*jjDvnsP$w;VwES8?(tzDgh%tg?4=k)mYr_@9> z2uQtCa<^?M(-5vtW~s9%WxXO3sL%J9E3&TAVU zJG=?CZqUudRAY}qfWw>W+1=`*&PxAbiya-6I_o_C@^;tOcR3>Xh88jqSVZ&J_j)RZ zkT)$l8s5J1kP0XUa>+OfPR?{7vV((zvusGoV?V9Ra)*IJ9K1Q42OpiB+^wvt-Hzp4 z-vplhrYuhTxgL!sBgp6&7;|i~q@v*$hh_O!WrDA8^7CAKDJnQzjwBnt*GLXMx7a(w zvE*!Qs^bX%x#Rc`HvN0YJq1Z|%}gq*|6YRbBsS~1KFu8Fg$?vdgw6W!9U}|+2qH!h z=hg8Oce+e-?;inW^_Wv%UysdOK7UwVQ}d&~9u=Tw*4A_l4GmyyoSZ7EssM8-DJ?Z- z!>g#MAZ262Odf*@9NH>qZA}C6aO2(M(}RVD1>`C*AwkI1l`S?l77*ruLIB&1f zI{GVKY)MIpAxE~AwRI7o;DOUzJI@GQ{s-}%zzH8Id6@}#yUy3({&Si(G&6K~dGh*= zKTi*3YNV+SrWsL}{RO7+>@euh=+nIVQQJpXOziEm+}L2TudD>jfq??y5M;u_(hk-_ ziQ+^+2i^OE0fouP@&i~k?d|R4jb)dYmlPBf?Th>H1$u%J5fSr!0q@xJBBG*{7#~d} za=o;r*LW2VY+%DT2uk#{whDB? zx0M$BKY!JmHsD8o-b~*4v#jmYk!#f$M&w|^fg74 z+tt*P%`(`|0+_&|!wT~~+Nzba&2z@`YeG^EV9lvoIH*V3o3&^Igh0}V(;KU6Z8``N z2FNx|^xsK
  • From 3d7f72a532841a1ce0a9635158b04a60ea080bf1 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 16:21:39 -0500 Subject: [PATCH 0683/1112] add uningest for a problem logic in api --- .../edu/harvard/iq/dataverse/api/Files.java | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 5d400ee1438..1f0e0801c68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -51,6 +51,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.URLTokenUtil; +import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -65,6 +66,7 @@ import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; import jakarta.inject.Inject; import jakarta.json.Json; import jakarta.json.JsonArray; @@ -637,7 +639,27 @@ public Response uningestDatafile(@Context ContainerRequestContext crc, @PathPara if (dataFile == null) { return error(Response.Status.NOT_FOUND, "File not found for given id."); } - + if (!dataFile.isTabularData()) { + // Ingest never succeeded, either there was a failure or this is not a tabular + // data file + // We allow anyone who can publish to uningest in order to clear a problem + if (dataFile.isIngestProblem()) { + try { + AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc); + if (!(permissionSvc.permissionsFor(au, dataFile).contains(Permission.PublishDataset))) { + return forbidden( + "Uningesting to remove an ingest problem can only be done by those who can publish the dataset"); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + dataFile.setIngestDone(); + dataFile.setIngestReport(null); + } else { + return error(Response.Status.BAD_REQUEST, + BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + } + } if (!dataFile.isTabularData()) { return error(Response.Status.BAD_REQUEST, "Cannot uningest non-tabular file."); } From 7de7f43c99d7f79a7a4a255e75bfa08506361a41 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 16:21:56 -0500 Subject: [PATCH 0684/1112] update docs --- .../source/user/tabulardataingest/ingestprocess.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index 4dce441de4a..418eb2206c8 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -67,9 +67,10 @@ who can see the draft version of the dataset containing the file that will indic part of the dataset, there will be no indication that ingest was attempted and failed. If the warning message is a concern, the Dataverse software includes both an API call (see :ref:`file-uningest` in the :doc:`/api/native-api` guide) -and an Edit/Uningest menu option displayed on the file page, that allow a file to be uningested. These are only available to superusers. -Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. 
-This will remove the .tab version of the file that was generated. +and an Edit/Uningest menu option displayed on the file page, that allow a file to be uningested by anone who can publish the dataset. + +Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. This is only available to superusers. +This will remove the variable-level metadata and the .tab version of the file that was generated. If a file is a tabular format but was never ingested, .e.g. due to the ingest file size limit being lower in the past, or if ingest had failed, e.g. in a prior Dataverse version, an reingest API (see :ref:`file-reingest` in the :doc:`/api/native-api` guide) and a file page Edit/Reingest option From 51fe60c095f52e26a6f1be7587c5323de7107993 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 16:26:50 -0500 Subject: [PATCH 0685/1112] fix logic --- .../edu/harvard/iq/dataverse/api/Files.java | 31 +++++++++---------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 1f0e0801c68..d48ae3247b5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -655,27 +655,24 @@ public Response uningestDatafile(@Context ContainerRequestContext crc, @PathPara } dataFile.setIngestDone(); dataFile.setIngestReport(null); + return ok("Datafile " + dataFile.getId() + " uningested."); } else { return error(Response.Status.BAD_REQUEST, - BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + BundleUtil.getStringFromBundle("Cannot uningest non-tabular file.")); + } + } else { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + execCommand(new UningestFileCommand(req, dataFile)); + Long dataFileId = dataFile.getId(); + dataFile = fileService.find(dataFileId); + Dataset theDataset = dataFile.getOwner(); + exportDatasetMetadata(settingsService, theDataset); + return ok("Datafile " + dataFileId + " uningested."); + } catch (WrappedResponse wr) { + return wr.getResponse(); } } - if (!dataFile.isTabularData()) { - return error(Response.Status.BAD_REQUEST, "Cannot uningest non-tabular file."); - } - - try { - DataverseRequest req = createDataverseRequest(getRequestUser(crc)); - execCommand(new UningestFileCommand(req, dataFile)); - Long dataFileId = dataFile.getId(); - dataFile = fileService.find(dataFileId); - Dataset theDataset = dataFile.getOwner(); - exportDatasetMetadata(settingsService, theDataset); - return ok("Datafile " + dataFileId + " uningested."); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - } // reingest attempts to queue an *existing* DataFile From 31d7cbcea224d253325f9baa3b4f4f1d8e802882 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 17:38:19 -0500 Subject: [PATCH 0686/1112] typo/merge issues --- src/main/webapp/file-edit-button-fragment.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index fd455521c98..30c3f6e7938 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -79,9 +79,9 @@ - +
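For readers following the uningest changes in the patches above, here is a minimal sketch of the API call the updated guide text refers to, assuming the usual native-API conventions used elsewhere in these docs (the server URL, API token, and file id below are placeholder values, not taken from the patches):

    export SERVER_URL=https://demo.dataverse.org
    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export FILE_ID=42

    # Clear a failed ingest; after these patches this only requires permission to publish the dataset
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$FILE_ID/uningest"

Undoing a successful ingest with the same call still requires a superuser account, as the guide text above notes.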
  • From 057d2c3c5d9a00b38354416dbaa70ee6637bbe43 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 17:49:28 -0500 Subject: [PATCH 0687/1112] missing save --- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index d48ae3247b5..f735ecfdec8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -655,6 +655,7 @@ public Response uningestDatafile(@Context ContainerRequestContext crc, @PathPara } dataFile.setIngestDone(); dataFile.setIngestReport(null); + fileService.save(dataFile); return ok("Datafile " + dataFile.getId() + " uningested."); } else { return error(Response.Status.BAD_REQUEST, From beb5bf6847469ab9b41b3128837d5c9d4daddf24 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 17:55:59 -0500 Subject: [PATCH 0688/1112] tweak api doc --- doc/sphinx-guides/source/api/native-api.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8cfa5deb96c..1b04d7c9e12 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2859,7 +2859,10 @@ The fully expanded example above (without environment variables) looks like this Uningest a File ~~~~~~~~~~~~~~~ -Reverse the tabular data ingest process performed on a file where ``ID`` is the database id or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file to process. Note that this requires "superuser" credentials. +Reverse the tabular data ingest process performed on a file where ``ID`` is the database id or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file to process. + +Note that this requires "superuser" credentials to undo a successful ingest and remove the variable-level metadata and .tab version of the file. +It can also be used by a user who can publish the dataset to clear the error from an unsuccessful ingest. A curl example using an ``ID``: From 00d418912d88e202a390a6c2d70d80efb0ec5bfc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 18:02:53 -0500 Subject: [PATCH 0689/1112] changelog, release note tweaks --- doc/release-notes/10318-uningest-and-reingest.md | 5 +++-- doc/sphinx-guides/source/api/changelog.rst | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/10318-uningest-and-reingest.md b/doc/release-notes/10318-uningest-and-reingest.md index 7465f934330..9f6f81b4818 100644 --- a/doc/release-notes/10318-uningest-and-reingest.md +++ b/doc/release-notes/10318-uningest-and-reingest.md @@ -1,2 +1,3 @@ -New Uningest/Reingest options are available in the File Page Edit menu for superusers, allowing ingest errors to be cleared and for -ingest to be retried (e.g. after a Dataverse version update or if ingest size limits are changed). +New Uningest/Reingest options are available in the File Page Edit menu, allowing ingest errors to be cleared (by users who can published the associated dataset) +and (by suerpsuers) for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). +The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. 
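The release note above also mentions that ingest can be retried. As a hedged companion sketch, reusing the same placeholder values, the reingest call it refers to (see the file-reingest section of the native API guide) would look like:

    # Queue the file for a new ingest attempt; per the release note this is a superuser action
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$FILE_ID/reingest"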
diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index d272086fa2e..99414550c4b 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -12,6 +12,7 @@ v6.2 - **/api/datasets/{id}/versions/{versionId}**: The includeFiles parameter has been renamed to excludeFiles. The default behavior remains the same, which is to include files. However, when excludeFiles is set to true, the files will be excluded. A bug that caused the API to only return a deaccessioned dataset if the user had edit privileges has been fixed. - **/api/datasets/{id}/versions**: The includeFiles parameter has been renamed to excludeFiles. The default behavior remains the same, which is to include files. However, when excludeFiles is set to true, the files will be excluded. +- **/api/files/$ID/uningest**: Can now be used by users with the ability to publish the dataset to undo a failed ingest. (Removing a successful ingest still requires being superuser) v6.1 ---- From ab0abaf83ca854214bf2a589ab642d4187ffa3fd Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Thu, 15 Feb 2024 13:45:34 +0000 Subject: [PATCH 0690/1112] Removed: double quotes in docs for DATASET_VERSION value --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5be73c01194..4038ec4340d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3519,7 +3519,7 @@ When the dataset version is published, authentication is not required: export SERVER_URL=https://demo.dataverse.org export FILE_ID=42 - export DATASET_VERSION=":latest-published" + export DATASET_VERSION=:latest-published curl "$SERVER_URL/api/files/$FILE_ID/versions/$DATASET_VERSION/citation" From d82c730b9f3ef0b1ba570878ea1814ea51dc073e Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 15 Feb 2024 11:00:39 -0500 Subject: [PATCH 0691/1112] adding harvesting feature to handle missing controlled values --- ...92-harvest-metadata-values-not-in-cvv-list | 6 ++++ .../settings/SettingsServiceBean.java | 7 +++- .../iq/dataverse/util/SystemConfig.java | 7 ++++ .../iq/dataverse/util/json/JsonParser.java | 34 +++++++++++-------- .../iq/dataverse/api/HarvestingClientsIT.java | 31 ++++++++++++++--- 5 files changed, 64 insertions(+), 21 deletions(-) create mode 100644 doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list diff --git a/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list b/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list new file mode 100644 index 00000000000..64ea2e1166a --- /dev/null +++ b/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list @@ -0,0 +1,6 @@ + +`AllowHarvestingMissingCVV` setting to enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. +The default value of this setting is false/no which will cause the harvesting of the dataset to fail. +By activating this feature (true/yes) the value in question will be removed from the list of values and the dataset will be harvested without the missing value. 
+ +`curl http://localhost:8080/api/admin/settings/:AllowHarvestingMissingCVV -X PUT -d yes` diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 3b7632f3d9e..6ed17d93ee3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -603,7 +603,12 @@ Whether Harvesting (OAI) service is enabled * When ingesting tabular data files, store the generated tab-delimited * files *with* the variable names line up top. */ - StoreIngestedTabularFilesWithVarHeaders + StoreIngestedTabularFilesWithVarHeaders, + + /** + * Should we ignore missing controlled vocabulary values when harvesting + */ + AllowHarvestingMissingCVV ; @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index ded394833f1..b2127cc263d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1181,4 +1181,11 @@ public Long getTestStorageQuotaLimit() { public boolean isStoringIngestedFilesWithHeaders() { return settingsService.isTrueForKey(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders, false); } + + /** + * Should we ignore missing controlled vocabulary values when harvesting + */ + public boolean allowHarvestingMissingCVV() { + return settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 984c607aac7..cd93f4719cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -38,7 +38,6 @@ import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -69,7 +68,8 @@ public class JsonParser { MetadataBlockServiceBean blockService; SettingsServiceBean settingsService; LicenseServiceBean licenseService; - HarvestingClient harvestingClient = null; + HarvestingClient harvestingClient = null; + boolean allowHarvestingMissingCVV = false; /** * if lenient, we will accept alternate spellings for controlled vocabulary values @@ -93,6 +93,7 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB this.settingsService = settingsService; this.licenseService = licenseService; this.harvestingClient = harvestingClient; + this.allowHarvestingMissingCVV = (harvestingClient != null && settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false)); } public JsonParser() { @@ -931,30 +932,30 @@ private String jsonValueToString(JsonValue jv) { } public List parseControlledVocabularyValue(DatasetFieldType cvvType, JsonObject json) throws JsonParseException { + List vals = new LinkedList<>(); try { if (cvvType.isAllowMultiples()) { try { json.getJsonArray("value").getValuesAs(JsonObject.class); } catch (ClassCastException cce) { throw new JsonParseException("Invalid values submitted for " + cvvType.getName() + ". 
It should be an array of values."); - } - List vals = new LinkedList<>(); + } for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { String strValue = strVal.getString(); ControlledVocabularyValue cvv = datasetFieldSvc.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(cvvType, strValue, lenient); - if (cvv == null) { + if (cvv == null && !allowHarvestingMissingCVV) { throw new ControlledVocabularyException("Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'", cvvType, strValue); } - // Only add value to the list if it is not a duplicate - if (strValue.equals("Other")) { - System.out.println("vals = " + vals + ", contains: " + vals.contains(cvv)); - } - if (!vals.contains(cvv)) { - vals.add(cvv); + if (cvv != null) { + // Only add value to the list if it is not a duplicate + if (strValue.equals("Other")) { + System.out.println("vals = " + vals + ", contains: " + vals.contains(cvv)); + } + if (!vals.contains(cvv)) { + vals.add(cvv); + } } } - return vals; - } else { try { json.getString("value"); @@ -963,11 +964,14 @@ public List parseControlledVocabularyValue(DatasetFie } String strValue = json.getString("value", ""); ControlledVocabularyValue cvv = datasetFieldSvc.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(cvvType, strValue, lenient); - if (cvv == null) { + if (cvv == null && !allowHarvestingMissingCVV) { throw new ControlledVocabularyException("Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'", cvvType, strValue); } - return Collections.singletonList(cvv); + if (cvv != null) { + vals.add(cvv); + } } + return vals; } catch (ClassCastException cce) { throw new JsonParseException("Invalid values submitted for " + cvvType.getName()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index d5388e510d2..36ef947e105 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -2,6 +2,8 @@ import java.util.logging.Logger; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import io.restassured.RestAssured; @@ -37,8 +39,8 @@ public class HarvestingClientsIT { private static final String ARCHIVE_URL = "https://demo.dataverse.org"; private static final String HARVEST_METADATA_FORMAT = "oai_dc"; private static final String ARCHIVE_DESCRIPTION = "RestAssured harvesting client test"; - private static final String CONTROL_OAI_SET = "controlTestSet"; - private static final int DATASETS_IN_CONTROL_SET = 7; + private static final String CONTROL_OAI_SET = "controlTestSet2"; + private static final int DATASETS_IN_CONTROL_SET = 8; private static String normalUserAPIKey; private static String adminUserAPIKey; private static String harvestCollectionAlias; @@ -54,6 +56,10 @@ public static void setUpClass() { setupCollection(); } + @AfterEach + public void cleanup() { + UtilIT.deleteSetting(SettingsServiceBean.Key.AllowHarvestingMissingCVV); + } private static void setupUsers() { Response cu0 = UtilIT.createRandomUser(); @@ -157,9 +163,24 @@ public void testCreateEditDeleteClient() throws InterruptedException { logger.info("rDelete.getStatusCode(): " + rDelete.getStatusCode()); assertEquals(OK.getStatusCode(), rDelete.getStatusCode()); } - + + @Test + public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws 
InterruptedException { + harvestingClientRun(true); + } @Test - public void testHarvestingClientRun() throws InterruptedException { + public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { + harvestingClientRun(false); + } + + private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { + int expectedNumberOfSetsHarvested = allowHarvestingMissingCVV ? DATASETS_IN_CONTROL_SET : DATASETS_IN_CONTROL_SET - 1; + if (allowHarvestingMissingCVV) { + UtilIT.enableSetting(SettingsServiceBean.Key.AllowHarvestingMissingCVV); + } else { + UtilIT.deleteSetting(SettingsServiceBean.Key.AllowHarvestingMissingCVV); + } + // This test will create a client and attempt to perform an actual // harvest and validate the resulting harvested content. @@ -242,7 +263,7 @@ public void testHarvestingClientRun() throws InterruptedException { assertEquals(harvestTimeStamp, responseJsonPath.getString("data.lastNonEmpty")); // d) Confirm that the correct number of datasets have been harvested: - assertEquals(DATASETS_IN_CONTROL_SET, responseJsonPath.getInt("data.lastDatasetsHarvested")); + assertEquals(expectedNumberOfSetsHarvested, responseJsonPath.getInt("data.lastDatasetsHarvested")); // ok, it looks like the harvest has completed successfully. break; From 34d7802622f6d38fec9debcd6ee88798c20bd358 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 15 Feb 2024 11:42:11 -0500 Subject: [PATCH 0692/1112] add .md to release notes file --- ...n-cvv-list => 9992-harvest-metadata-values-not-in-cvv-list.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/release-notes/{9992-harvest-metadata-values-not-in-cvv-list => 9992-harvest-metadata-values-not-in-cvv-list.md} (100%) diff --git a/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list b/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md similarity index 100% rename from doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list rename to doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md From 87b5a38bd5511a169f1ccae9d3bb966f2e3cb6b6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 15 Feb 2024 17:01:59 -0500 Subject: [PATCH 0693/1112] typo #10318 --- .../source/user/tabulardataingest/ingestprocess.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index 418eb2206c8..1e481a54da6 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -67,7 +67,7 @@ who can see the draft version of the dataset containing the file that will indic part of the dataset, there will be no indication that ingest was attempted and failed. If the warning message is a concern, the Dataverse software includes both an API call (see :ref:`file-uningest` in the :doc:`/api/native-api` guide) -and an Edit/Uningest menu option displayed on the file page, that allow a file to be uningested by anone who can publish the dataset. +and an Edit/Uningest menu option displayed on the file page, that allow a file to be uningested by anyone who can publish the dataset. Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. This is only available to superusers. This will remove the variable-level metadata and the .tab version of the file that was generated. 
From 95ce492f221a5980729c6b26528feb1ac681c56b Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 16 Feb 2024 10:38:56 +0000 Subject: [PATCH 0694/1112] Changed: includeDeaccessioned optional param in getLatestPublishedFileMetadata, and existing usages changed --- .../edu/harvard/iq/dataverse/DataFile.java | 8 +++-- .../edu/harvard/iq/dataverse/api/EditDDI.java | 2 +- .../edu/harvard/iq/dataverse/api/Files.java | 2 +- ...stractGetPublishedFileMetadataCommand.java | 31 +++++++++++++++++++ ...etDraftFileMetadataIfAvailableCommand.java | 4 +-- ...etLatestAccessibleFileMetadataCommand.java | 4 +-- ...GetLatestPublishedFileMetadataCommand.java | 19 +++++------- ...edFileMetadataByDatasetVersionCommand.java | 22 +++---------- .../MakeDataCountLoggingServiceBean.java | 2 +- 9 files changed, 56 insertions(+), 38 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractGetPublishedFileMetadataCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 818cade1eef..de13a83e204 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -567,9 +567,13 @@ public FileMetadata getLatestFileMetadata() { return resultFileMetadata; } - public FileMetadata getLatestPublishedFileMetadata() throws UnsupportedOperationException { + public FileMetadata getLatestPublishedFileMetadata(boolean includeDeaccessioned) throws UnsupportedOperationException { FileMetadata resultFileMetadata = fileMetadatas.stream() - .filter(metadata -> !metadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) + .filter(metadata -> { + VersionState versionState = metadata.getDatasetVersion().getVersionState(); + return (!versionState.equals(VersionState.DRAFT) && + !(versionState.equals(VersionState.DEACCESSIONED) && !includeDeaccessioned)); + }) .reduce(null, this::getTheNewerFileMetadata); if (resultFileMetadata == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java index 1b74ab5479e..d6aee0b7bfc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java @@ -124,7 +124,7 @@ public Response edit(@Context ContainerRequestContext crc, InputStream body, @Pa if (!latestVersion.isWorkingCopy()) { //for new draft version - FileMetadata latestFml = dataFile.getLatestPublishedFileMetadata(); + FileMetadata latestFml = dataFile.getLatestPublishedFileMetadata(true); boolean groupUpdate = newGroups(varGroupMap, latestFml); boolean varUpdate = varUpdates(mapVarToVarMet, latestFml, neededToUpdateVM, true); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 55d65bae96b..b7494c5daec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -580,7 +580,7 @@ public Response getFileMetadata(@Context ContainerRequestContext crc, @PathParam return error(BAD_REQUEST, BundleUtil.getStringFromBundle("files.api.no.draft")); } } else { - fm = df.getLatestPublishedFileMetadata(); + fm = df.getLatestPublishedFileMetadata(false); MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, df); mdcLogService.logEntry(entry); } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractGetPublishedFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractGetPublishedFileMetadataCommand.java new file mode 100644 index 00000000000..82d0ac3491b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractGetPublishedFileMetadataCommand.java @@ -0,0 +1,31 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; + +@RequiredPermissions({}) +abstract class AbstractGetPublishedFileMetadataCommand extends AbstractCommand { + protected final DataFile dataFile; + protected final boolean includeDeaccessioned; + + public AbstractGetPublishedFileMetadataCommand(DataverseRequest request, DataFile dataFile, boolean includeDeaccessioned) { + super(request, dataFile); + this.dataFile = dataFile; + this.includeDeaccessioned = includeDeaccessioned; + } + + protected boolean isDatasetVersionAccessible(DatasetVersion datasetVersion, Dataset ownerDataset, CommandContext ctxt) { + return datasetVersion.isReleased() || isDatasetVersionDeaccessionedAndAccessible(datasetVersion, ownerDataset, ctxt); + } + + private boolean isDatasetVersionDeaccessionedAndAccessible(DatasetVersion datasetVersion, Dataset ownerDataset, CommandContext ctxt) { + return includeDeaccessioned && datasetVersion.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ownerDataset).has(Permission.EditDataset); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java index e0f8ca1fcf8..8ed058d79f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftFileMetadataIfAvailableCommand.java @@ -16,8 +16,8 @@ public class GetDraftFileMetadataIfAvailableCommand extends AbstractCommand { private final DataFile dataFile; - public GetDraftFileMetadataIfAvailableCommand(DataverseRequest aRequest, DataFile dataFile) { - super(aRequest, dataFile); + public GetDraftFileMetadataIfAvailableCommand(DataverseRequest request, DataFile dataFile) { + super(request, dataFile); this.dataFile = dataFile; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java index fa80b75c593..98913d63471 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java @@ -14,8 +14,8 @@ public class GetLatestAccessibleFileMetadataCommand extends AbstractCommand { - private final DataFile dataFile; - private final boolean includeDeaccessioned; +public class GetLatestPublishedFileMetadataCommand extends 
AbstractGetPublishedFileMetadataCommand { - public GetLatestPublishedFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile, boolean includeDeaccessioned) { - super(aRequest, dataFile); - this.dataFile = dataFile; - this.includeDeaccessioned = includeDeaccessioned; + public GetLatestPublishedFileMetadataCommand(DataverseRequest request, DataFile dataFile, boolean includeDeaccessioned) { + super(request, dataFile, includeDeaccessioned); } @Override public FileMetadata execute(CommandContext ctxt) throws CommandException { try { - return dataFile.getLatestPublishedFileMetadata(); + FileMetadata fileMetadata = dataFile.getLatestPublishedFileMetadata(includeDeaccessioned); + if (isDatasetVersionAccessible(fileMetadata.getDatasetVersion(), dataFile.getOwner(), ctxt)) { + return fileMetadata; + } + return null; } catch (UnsupportedOperationException e) { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java index 82350d3bd95..deffbfb57ee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java @@ -1,29 +1,20 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -@RequiredPermissions({}) -public class GetSpecificPublishedFileMetadataByDatasetVersionCommand extends AbstractCommand { +public class GetSpecificPublishedFileMetadataByDatasetVersionCommand extends AbstractGetPublishedFileMetadataCommand { private final long majorVersion; private final long minorVersion; - private final DataFile dataFile; - private final boolean includeDeaccessioned; - public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersion, long minorVersion, boolean includeDeaccessioned) { - super(aRequest, dataFile); - this.dataFile = dataFile; + public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest request, DataFile dataFile, long majorVersion, long minorVersion, boolean includeDeaccessioned) { + super(request, dataFile, includeDeaccessioned); this.majorVersion = majorVersion; this.minorVersion = minorVersion; - this.includeDeaccessioned = includeDeaccessioned; } @Override @@ -36,13 +27,8 @@ public FileMetadata execute(CommandContext ctxt) throws CommandException { private boolean isRequestedVersionFileMetadata(FileMetadata fileMetadata, CommandContext ctxt) { DatasetVersion datasetVersion = fileMetadata.getDatasetVersion(); - Dataset ownerDataset = dataFile.getOwner(); - return (datasetVersion.isReleased() || isDatasetVersionDeaccessionedAndAccessible(datasetVersion, ownerDataset, ctxt)) + return isDatasetVersionAccessible(datasetVersion, 
dataFile.getOwner(), ctxt) && datasetVersion.getVersionNumber().equals(majorVersion) && datasetVersion.getMinorVersionNumber().equals(minorVersion); } - - private boolean isDatasetVersionDeaccessionedAndAccessible(DatasetVersion datasetVersion, Dataset ownerDataset, CommandContext ctxt) { - return includeDeaccessioned && datasetVersion.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ownerDataset).has(Permission.EditDataset); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java index 5edf2fde0c3..a3f09d190ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java @@ -133,7 +133,7 @@ public MakeDataCountEntry(FacesContext fc, DataverseRequestServiceBean dvRequest //Exception thrown if no published metadata exists for DataFile //This is passed a DataFile to log the file downloaded. uriInfo and headers are passed in lieu of FacesContext public MakeDataCountEntry(UriInfo uriInfo, HttpHeaders headers, DataverseRequestServiceBean dvRequestService, DataFile df) throws UnsupportedOperationException{ - this(null, dvRequestService, df.getLatestPublishedFileMetadata().getDatasetVersion()); + this(null, dvRequestService, df.getLatestPublishedFileMetadata(false).getDatasetVersion()); if(uriInfo != null) { setRequestUrl(uriInfo.getRequestUri().toString()); From 6d79c6b1dc313263dd5b01e87e1817c7d00dd703 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 16 Feb 2024 07:19:46 -0500 Subject: [PATCH 0695/1112] #10326 update installation-main.rst to reference Python installer --- doc/sphinx-guides/source/installation/installation-main.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 46c1b0b0af3..bc51a8e19f5 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -68,7 +68,7 @@ The script will prompt you for some configuration values. If this is a test/eval If desired, these default values can be configured by creating a ``default.config`` (example :download:`here <../../../../scripts/installer/default.config>`) file in the installer's working directory with new values (if this file isn't present, the above defaults will be used). -This allows the installer to be run in non-interactive mode (with ``./install -y -f > install.out 2> install.err``), which can allow for easier interaction with automated provisioning tools. +This allows the installer to be run in non-interactive mode (with ``./install.py -y -f > install.out 2> install.err``), which can allow for easier interaction with automated provisioning tools. All the Payara configuration tasks performed by the installer are isolated in the shell script ``dvinstall/as-setup.sh`` (as ``asadmin`` commands). From 213b0256fc41ec745979f4a85dd3e259e2c218c1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 16 Feb 2024 14:46:50 +0100 Subject: [PATCH 0696/1112] test(mail): add more tests for mail session producer with invalid config Also fix minor linting with visibility of test methods. 
--- .../iq/dataverse/MailServiceBeanTest.java | 4 +- .../dataverse/util/MailSessionProducerIT.java | 48 +++++++++++++++++-- 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java index f8a01c53298..afcc12949d6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java @@ -35,11 +35,11 @@ class Delegation { * We need to reset the BrandingUtil mocks for every test, as we rely on them being set to default. */ @BeforeEach - private void setup() { + void setup() { BrandingUtilTest.setupMocks(); } @AfterAll - private static void tearDown() { + static void tearDown() { BrandingUtilTest.tearDownMocks(); } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java index dcf04b7644a..8280578a343 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.MailServiceBean; import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.branding.BrandingUtilTest; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.testing.JvmSetting; @@ -10,14 +11,12 @@ import edu.harvard.iq.dataverse.util.testing.Tags; import io.restassured.RestAssured; import jakarta.mail.Session; -import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.testcontainers.containers.GenericContainer; @@ -30,6 +29,9 @@ import static io.restassured.RestAssured.given; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; /** @@ -43,6 +45,8 @@ @Tag(Tags.USES_TESTCONTAINERS) @Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) +@LocalJvmSettings +@JvmSetting(key = JvmSettings.SYSTEM_EMAIL, value = "test@test.com") class MailSessionProducerIT { private static final Integer PORT_SMTP = 1025; @@ -51,15 +55,21 @@ class MailSessionProducerIT { static SettingsServiceBean settingsServiceBean = Mockito.mock(SettingsServiceBean.class);; static DataverseServiceBean dataverseServiceBean = Mockito.mock(DataverseServiceBean.class);; + /** + * We need to reset the BrandingUtil mocks for every test, as we rely on them being set to default. 
+ */ @BeforeAll static void setUp() { // Setup mocks behavior, inject as deps BrandingUtil.injectServices(dataverseServiceBean, settingsServiceBean); } + @AfterAll + static void tearDown() { + BrandingUtilTest.tearDownMocks(); + } @Nested @LocalJvmSettings - @JvmSetting(key = JvmSettings.SYSTEM_EMAIL, value = "test@test.com") @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") class WithoutAuthentication { @@ -113,7 +123,6 @@ void createSession() { @Nested @LocalJvmSettings - @JvmSetting(key = JvmSettings.SYSTEM_EMAIL, value = "test@test.com") @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") @JvmSetting(key = JvmSettings.MAIL_MTA_AUTH, value = "yes") @@ -169,4 +178,33 @@ void createSession() { } + @Nested + @LocalJvmSettings + class InvalidConfiguration { + @Test + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, value = "1234", varArgs = "invalid") + void invalidConfigItemsAreIgnoredOnSessionBuild() { + assertDoesNotThrow(() -> new MailSessionProducer().getSession()); + + Session mailSession = new MailSessionProducer().getSession(); + MailServiceBean mailer = new MailServiceBean(mailSession, settingsServiceBean); + assertFalse(mailer.sendSystemEmail("test@example.org", "Test", "Test", false)); + } + + @Test + @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, value = "foobar") + void invalidHostnameIsFailingWhenSending() { + assertDoesNotThrow(() -> new MailSessionProducer().getSession()); + + Session mailSession = new MailSessionProducer().getSession(); + MailServiceBean mailer = new MailServiceBean(mailSession, settingsServiceBean); + assertFalse(mailer.sendSystemEmail("test@example.org", "Test", "Test", false)); + } + + @Test + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, varArgs = "port" , value = "foobar") + void invalidPortWithLetters() { + assertThrows(IllegalArgumentException.class, () -> new MailSessionProducer().getSession()); + } + } } \ No newline at end of file From 32d2fa4fa8a26783f6055d1715cb667a3b3ae4d1 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 16 Feb 2024 11:06:18 -0500 Subject: [PATCH 0697/1112] modify for comments --- .../iq/dataverse/util/json/JsonParser.java | 11 +++----- .../iq/dataverse/api/HarvestingClientsIT.java | 25 ++++++++++--------- 2 files changed, 16 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index cd93f4719cd..bd756fffdbf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -946,14 +946,9 @@ public List parseControlledVocabularyValue(DatasetFie if (cvv == null && !allowHarvestingMissingCVV) { throw new ControlledVocabularyException("Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'", cvvType, strValue); } - if (cvv != null) { - // Only add value to the list if it is not a duplicate - if (strValue.equals("Other")) { - System.out.println("vals = " + vals + ", contains: " + vals.contains(cvv)); - } - if (!vals.contains(cvv)) { - vals.add(cvv); - } + // Only add value to the list if it is not a duplicate + if (cvv != null && !vals.contains(cvv)) { + vals.add(cvv); } } } else { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 36ef947e105..71d4fc14ad5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -43,7 +43,8 @@ public class HarvestingClientsIT { private static final int DATASETS_IN_CONTROL_SET = 8; private static String normalUserAPIKey; private static String adminUserAPIKey; - private static String harvestCollectionAlias; + private static String harvestCollectionAlias; + String clientApiPath = null; @BeforeAll public static void setUpClass() { @@ -59,6 +60,15 @@ public static void setUpClass() { @AfterEach public void cleanup() { UtilIT.deleteSetting(SettingsServiceBean.Key.AllowHarvestingMissingCVV); + // Cleanup: delete the client + if (clientApiPath != null) { + Response deleteResponse = given() + .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) + .delete(clientApiPath); + System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); + assertEquals(OK.getStatusCode(), deleteResponse.getStatusCode()); + clientApiPath = null; + } } private static void setupUsers() { @@ -191,7 +201,7 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte String nickName = "h" + UtilIT.getRandomString(6); - String clientApiPath = String.format(HARVEST_CLIENTS_API+"%s", nickName); + clientApiPath = String.format(HARVEST_CLIENTS_API+"%s", nickName); String clientJson = String.format("{\"dataverseAlias\":\"%s\"," + "\"type\":\"oai\"," + "\"harvestUrl\":\"%s\"," @@ -279,15 +289,6 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte // datasets have been harvested. This may or may not be necessary, seeing // how we have already confirmed the number of successfully harvested // datasets from the control set; somewhat hard to imagine a practical - // situation where that would not be enough (?). - - // Cleanup: delete the client - - Response deleteResponse = given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) - .delete(clientApiPath); - System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); - assertEquals(OK.getStatusCode(), deleteResponse.getStatusCode()); - + // situation where that would not be enough (?). 
} } From 97678807454cd3834f5dc4a59c50599f326e14dd Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 16 Feb 2024 13:09:54 -0500 Subject: [PATCH 0698/1112] addressing review comments --- .../iq/dataverse/util/json/JsonParser.java | 29 +++++++++++-------- .../iq/dataverse/api/HarvestingClientsIT.java | 10 +++---- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index bd756fffdbf..ac7b6bb4067 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -38,6 +38,7 @@ import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -69,7 +70,6 @@ public class JsonParser { SettingsServiceBean settingsService; LicenseServiceBean licenseService; HarvestingClient harvestingClient = null; - boolean allowHarvestingMissingCVV = false; /** * if lenient, we will accept alternate spellings for controlled vocabulary values @@ -93,7 +93,6 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB this.settingsService = settingsService; this.licenseService = licenseService; this.harvestingClient = harvestingClient; - this.allowHarvestingMissingCVV = (harvestingClient != null && settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false)); } public JsonParser() { @@ -738,7 +737,14 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar ret.setDatasetFieldType(type); - + + // If Harvesting, CVV values may differ between the Dataverse installations, so we won't enforce them + if (harvestingClient != null && type.isControlledVocabulary() && + settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false)) { + type.setAllowControlledVocabulary(false); + logger.warning("Harvesting: Skipping Controlled Vocabulary. Treating values as primitives"); + } + if (type.isCompound()) { List vals = parseCompoundValue(type, json, testType); for (DatasetFieldCompoundValue dsfcv : vals) { @@ -930,9 +936,8 @@ private String jsonValueToString(JsonValue jv) { default: return jv.toString(); } } - + public List parseControlledVocabularyValue(DatasetFieldType cvvType, JsonObject json) throws JsonParseException { - List vals = new LinkedList<>(); try { if (cvvType.isAllowMultiples()) { try { @@ -940,17 +945,20 @@ public List parseControlledVocabularyValue(DatasetFie } catch (ClassCastException cce) { throw new JsonParseException("Invalid values submitted for " + cvvType.getName() + ". 
It should be an array of values."); } + List vals = new LinkedList<>(); for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { String strValue = strVal.getString(); ControlledVocabularyValue cvv = datasetFieldSvc.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(cvvType, strValue, lenient); - if (cvv == null && !allowHarvestingMissingCVV) { + if (cvv == null) { throw new ControlledVocabularyException("Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'", cvvType, strValue); } // Only add value to the list if it is not a duplicate - if (cvv != null && !vals.contains(cvv)) { + if (!vals.contains(cvv)) { vals.add(cvv); } } + return vals; + } else { try { json.getString("value"); @@ -959,14 +967,11 @@ public List parseControlledVocabularyValue(DatasetFie } String strValue = json.getString("value", ""); ControlledVocabularyValue cvv = datasetFieldSvc.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(cvvType, strValue, lenient); - if (cvv == null && !allowHarvestingMissingCVV) { + if (cvv == null) { throw new ControlledVocabularyException("Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'", cvvType, strValue); } - if (cvv != null) { - vals.add(cvv); - } + return Collections.singletonList(cvv); } - return vals; } catch (ClassCastException cce) { throw new JsonParseException("Invalid values submitted for " + cvvType.getName()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 71d4fc14ad5..9b83c4c1c9a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -60,14 +60,12 @@ public static void setUpClass() { @AfterEach public void cleanup() { UtilIT.deleteSetting(SettingsServiceBean.Key.AllowHarvestingMissingCVV); - // Cleanup: delete the client if (clientApiPath != null) { Response deleteResponse = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) .delete(clientApiPath); - System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); - assertEquals(OK.getStatusCode(), deleteResponse.getStatusCode()); clientApiPath = null; + System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); } } @@ -175,11 +173,11 @@ public void testCreateEditDeleteClient() throws InterruptedException { } @Test - public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { - harvestingClientRun(true); + public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { + harvestingClientRun(false); } @Test - public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { + public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { harvestingClientRun(false); } From 61fa5719e73c7d3605268cca0fe4670cac9f4439 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 01:00:36 +0000 Subject: [PATCH 0699/1112] Fixed: includeDeaccessioned wrong behavior in getFileInfo --- .../edu/harvard/iq/dataverse/DataFile.java | 12 +-- .../edu/harvard/iq/dataverse/api/EditDDI.java | 2 +- .../edu/harvard/iq/dataverse/api/Files.java | 2 +- ...GetLatestPublishedFileMetadataCommand.java | 16 ++-- .../MakeDataCountLoggingServiceBean.java | 2 +- .../edu/harvard/iq/dataverse/api/FilesIT.java | 84 +++++++++++++++---- 
.../edu/harvard/iq/dataverse/api/UtilIT.java | 5 ++ 7 files changed, 89 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index de13a83e204..25ec40de845 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -567,14 +567,10 @@ public FileMetadata getLatestFileMetadata() { return resultFileMetadata; } - public FileMetadata getLatestPublishedFileMetadata(boolean includeDeaccessioned) throws UnsupportedOperationException { + public FileMetadata getLatestPublishedFileMetadata() throws UnsupportedOperationException { FileMetadata resultFileMetadata = fileMetadatas.stream() - .filter(metadata -> { - VersionState versionState = metadata.getDatasetVersion().getVersionState(); - return (!versionState.equals(VersionState.DRAFT) && - !(versionState.equals(VersionState.DEACCESSIONED) && !includeDeaccessioned)); - }) - .reduce(null, this::getTheNewerFileMetadata); + .filter(metadata -> !metadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) + .reduce(null, DataFile::getTheNewerFileMetadata); if (resultFileMetadata == null) { throw new UnsupportedOperationException("No published metadata version for DataFile " + this.getId()); @@ -583,7 +579,7 @@ public FileMetadata getLatestPublishedFileMetadata(boolean includeDeaccessioned) return resultFileMetadata; } - private FileMetadata getTheNewerFileMetadata(FileMetadata current, FileMetadata candidate) { + public static FileMetadata getTheNewerFileMetadata(FileMetadata current, FileMetadata candidate) { if (current == null) { return candidate; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java index d6aee0b7bfc..1b74ab5479e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java @@ -124,7 +124,7 @@ public Response edit(@Context ContainerRequestContext crc, InputStream body, @Pa if (!latestVersion.isWorkingCopy()) { //for new draft version - FileMetadata latestFml = dataFile.getLatestPublishedFileMetadata(true); + FileMetadata latestFml = dataFile.getLatestPublishedFileMetadata(); boolean groupUpdate = newGroups(varGroupMap, latestFml); boolean varUpdate = varUpdates(mapVarToVarMet, latestFml, neededToUpdateVM, true); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index b7494c5daec..55d65bae96b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -580,7 +580,7 @@ public Response getFileMetadata(@Context ContainerRequestContext crc, @PathParam return error(BAD_REQUEST, BundleUtil.getStringFromBundle("files.api.no.draft")); } } else { - fm = df.getLatestPublishedFileMetadata(false); + fm = df.getLatestPublishedFileMetadata(); MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountLoggingServiceBean.MakeDataCountEntry(uriInfo, headers, dvRequestService, df); mdcLogService.logEntry(entry); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java index ea58cd4e7eb..7c07766748c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -14,14 +15,11 @@ public GetLatestPublishedFileMetadataCommand(DataverseRequest request, DataFile @Override public FileMetadata execute(CommandContext ctxt) throws CommandException { - try { - FileMetadata fileMetadata = dataFile.getLatestPublishedFileMetadata(includeDeaccessioned); - if (isDatasetVersionAccessible(fileMetadata.getDatasetVersion(), dataFile.getOwner(), ctxt)) { - return fileMetadata; - } - return null; - } catch (UnsupportedOperationException e) { - return null; - } + return dataFile.getFileMetadatas().stream().filter(fileMetadata -> { + DatasetVersion.VersionState versionState = fileMetadata.getDatasetVersion().getVersionState(); + return (!versionState.equals(DatasetVersion.VersionState.DRAFT) + && !(versionState.equals(DatasetVersion.VersionState.DEACCESSIONED) && !includeDeaccessioned) + && isDatasetVersionAccessible(fileMetadata.getDatasetVersion(), dataFile.getOwner(), ctxt)); + }).reduce(null, DataFile::getTheNewerFileMetadata); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java index a3f09d190ca..5edf2fde0c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java @@ -133,7 +133,7 @@ public MakeDataCountEntry(FacesContext fc, DataverseRequestServiceBean dvRequest //Exception thrown if no published metadata exists for DataFile //This is passed a DataFile to log the file downloaded. 
uriInfo and headers are passed in lieu of FacesContext public MakeDataCountEntry(UriInfo uriInfo, HttpHeaders headers, DataverseRequestServiceBean dvRequestService, DataFile df) throws UnsupportedOperationException{ - this(null, dvRequestService, df.getLatestPublishedFileMetadata(false).getDatasetVersion()); + this(null, dvRequestService, df.getLatestPublishedFileMetadata().getDatasetVersion()); if(uriInfo != null) { setRequestUrl(uriInfo.getRequestUri().toString()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 4fb667d8955..4e1be85af56 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1402,14 +1402,15 @@ public void testDataSizeInDataverse() throws InterruptedException { @Test public void testGetFileInfo() { Response createUser = UtilIT.createRandomUser(); - String username = UtilIT.getUsernameFromResponse(createUser); + String superUserUsername = UtilIT.getUsernameFromResponse(createUser); String superUserApiToken = UtilIT.getApiTokenFromResponse(createUser); - UtilIT.makeSuperUser(username); + UtilIT.makeSuperUser(superUserUsername); String dataverseAlias = createDataverseGetAlias(superUserApiToken); Integer datasetId = createDatasetGetId(dataverseAlias, superUserApiToken); createUser = UtilIT.createRandomUser(); - String apiTokenRegular = UtilIT.getApiTokenFromResponse(createUser); + String regularUsername = UtilIT.getUsernameFromResponse(createUser); + String regularApiToken = UtilIT.getApiTokenFromResponse(createUser); msg("Add a non-tabular file"); String pathToFile = "scripts/search/data/binary/trees.png"; @@ -1427,7 +1428,7 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // Regular user should not get to see draft file data - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken); getFileDataResponse.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()); @@ -1441,7 +1442,7 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // Regular user should get to see published file data - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()); @@ -1464,7 +1465,7 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // Regular user should not get to see draft file data - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_DRAFT); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_DRAFT); getFileDataResponse.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()); @@ -1481,13 +1482,13 @@ public void testGetFileInfo() { updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); // Regular user should get to see latest published file data - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_LATEST_PUBLISHED); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.label", equalTo(newFileNameFirstUpdate)); // Regular user should get to see latest published file data if latest is requested - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_LATEST); + 
getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.label", equalTo(newFileNameFirstUpdate)); @@ -1504,25 +1505,80 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // Regular user should get to see file data by specific version number - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, "2.0"); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "2.0"); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.label", equalTo(newFileNameFirstUpdate)); - getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, "3.0"); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0"); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.label", equalTo(newFileNameSecondUpdate)); + // The following tests cover cases where the dataset version is deaccessioned + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, "3.0", "Test reason", null, superUserApiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Superuser should get to see file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameSecondUpdate)) + .statusCode(OK.getStatusCode()); + + // Superuser should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, false); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Update the file metadata + String newFileNameThirdUpdate = "trees_4.png"; + updateFileMetadata = Json.createObjectBuilder() + .add("label", newFileNameThirdUpdate); + updateFileMetadataResponse = UtilIT.updateFileMetadata(dataFileId, updateFileMetadata.build().toString(), superUserApiToken); + updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); + + // Superuser should get to see draft file data if draft exists filtering by latest and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameThirdUpdate)) + .statusCode(OK.getStatusCode()); + + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned and draft exists filtering by latest and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Publish dataset once again + publishDatasetResp = 
UtilIT.publishDatasetViaNativeApi(datasetId, "major", superUserApiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Regular user should get to see file data if the latest version is not deaccessioned filtering by latest and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameThirdUpdate)) + .statusCode(OK.getStatusCode()); + // Cleanup Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, superUserApiToken); - assertEquals(200, destroyDatasetResponse.getStatusCode()); + destroyDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, superUserApiToken); - assertEquals(200, deleteDataverseResponse.getStatusCode()); + deleteDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response deleteUserResponse = UtilIT.deleteUser(username); - assertEquals(200, deleteUserResponse.getStatusCode()); + Response deleteUserResponse = UtilIT.deleteUser(superUserUsername); + deleteUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + + deleteUserResponse = UtilIT.deleteUser(regularUsername); + deleteUserResponse.then().assertThat().statusCode(OK.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index a3d894c7a52..a63d0521a24 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1092,8 +1092,13 @@ static Response getFileData(String fileId, String apiToken) { } static Response getFileData(String fileId, String apiToken, String datasetVersionId) { + return getFileData(fileId, apiToken, datasetVersionId, false); + } + + static Response getFileData(String fileId, String apiToken, String datasetVersionId, boolean includeDeaccessioned) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/files/" + fileId + "/versions/" + datasetVersionId); } From d0b745499bfcb8da33575eea66f6aedfa220d849 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 10:00:09 +0000 Subject: [PATCH 0700/1112] Added: IT testGetFileInfo cases --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 79 ++++++++++++++++++- 1 file changed, 75 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 4e1be85af56..ad86127e231 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1420,9 +1420,10 @@ public void testGetFileInfo() { // Superuser should get to see draft file data String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); Response getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken); + String newFileName = "trees.png"; getFileDataResponse.then().assertThat() - .body("data.label", equalTo("trees.png")) - .body("data.dataFile.filename", equalTo("trees.png")) + .body("data.label", equalTo(newFileName)) + .body("data.dataFile.filename", equalTo(newFileName)) .body("data.dataFile.contentType", equalTo("image/png")) .body("data.dataFile.filesize", equalTo(8361)) .statusCode(OK.getStatusCode()); @@ -1444,7 +1445,8 @@ public void 
testGetFileInfo() { // Regular user should get to see published file data getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken); getFileDataResponse.then().assertThat() - .statusCode(OK.getStatusCode()); + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileName)); // The following tests cover cases where a version ID is specified in the endpoint // Superuser should not get to see draft file data when no draft version exists @@ -1452,6 +1454,12 @@ public void testGetFileInfo() { getFileDataResponse.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); + // Regular user should get to see file data from specific version filtering by tag + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "1.0"); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileName)); + // Update the file metadata String newFileNameFirstUpdate = "trees_2.png"; JsonObjectBuilder updateFileMetadata = Json.createObjectBuilder() @@ -1525,18 +1533,63 @@ public void testGetFileInfo() { .body("data.label", equalTo(newFileNameSecondUpdate)) .statusCode(OK.getStatusCode()); + // Superuser should get to see file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameSecondUpdate)) + .statusCode(OK.getStatusCode()); + // Superuser should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is false getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); - // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is false + // Superuser should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, false); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Superuser should get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameSecondUpdate)) + .statusCode(OK.getStatusCode()); + + // Superuser should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", false); + getFileDataResponse.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is true getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); + // Regular user should get to see 
version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, false); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + + // Regular user should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", true); + getFileDataResponse.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + // Regular user should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is false + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", false); + getFileDataResponse.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + // Update the file metadata String newFileNameThirdUpdate = "trees_4.png"; updateFileMetadata = Json.createObjectBuilder() @@ -1550,12 +1603,24 @@ public void testGetFileInfo() { .body("data.label", equalTo(newFileNameThirdUpdate)) .statusCode(OK.getStatusCode()); + // Superuser should get to see latest published file data if draft exists filtering by latest published and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameSecondUpdate)) + .statusCode(OK.getStatusCode()); + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned and draft exists filtering by latest and includeDeaccessioned is true getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); + // Regular user should get to see version 2.0 file data if the latest version is deaccessioned and draft exists filtering by latest published and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameFirstUpdate)) + .statusCode(OK.getStatusCode()); + // Publish dataset once again publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", superUserApiToken); publishDatasetResp.then().assertThat() @@ -1567,6 +1632,12 @@ public void testGetFileInfo() { .body("data.label", equalTo(newFileNameThirdUpdate)) .statusCode(OK.getStatusCode()); + // Regular user should get to see file data if the latest version is not deaccessioned filtering by latest published and includeDeaccessioned is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo(newFileNameThirdUpdate)) + .statusCode(OK.getStatusCode()); + // Cleanup 
Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, superUserApiToken); destroyDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); From a267adc66ceb02cd5864d7bf40d4637838c1c1d6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 10:38:32 +0000 Subject: [PATCH 0701/1112] Removed: commented code in json printer for DatasetVersion --- .../iq/dataverse/util/json/JsonPrinter.java | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 2eaf6b64579..93e214159cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -356,9 +356,6 @@ public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { } public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles) { - /* return json(dsv, null, includeFiles, null); - } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, Long numberOfFiles) {*/ Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) @@ -374,8 +371,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized .add("alternativePersistentId", dataset.getAlternativePersistentIdentifier()) .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD()) .add("citationDate", dataset.getCitationDateFormattedYYYYMMDD()); - //.add("numberOfFiles", numberOfFiles); - + License license = DatasetUtil.getLicense(dsv); if (license != null) { bld.add("license", jsonLicense(dsv)); @@ -593,6 +589,18 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { return fieldsBld; } + /* + + versionId: number +displayName: string +versionNumber: {majorNumber?: number, minorNumber?: number} +publishingStatus: string +citation: string +isLatest: boolean +isInReview: boolean +latestVersionPublishingStatus: string + */ + public static JsonObjectBuilder json(FileMetadata fmd) { return jsonObjectBuilder() // deprecated: .add("category", fmd.getCategory()) From ff2e86c19a232ac9821d045e284bb0f27bcd909b Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 11:38:03 +0000 Subject: [PATCH 0702/1112] Added: returnDatasetVersion optional parameter to getFileInfo API endpoint --- .../edu/harvard/iq/dataverse/api/Files.java | 14 +++--- .../iq/dataverse/util/json/JsonPrinter.java | 27 +++++++---- .../edu/harvard/iq/dataverse/api/FilesIT.java | 47 ++++++++++++------- .../edu/harvard/iq/dataverse/api/UtilIT.java | 5 +- 4 files changed, 59 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index fa8332c6138..d07950d5c37 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -49,10 +49,7 @@ import jakarta.ejb.EJB; import jakarta.ejb.EJBException; import jakarta.inject.Inject; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonString; -import jakarta.json.JsonValue; +import jakarta.json.*; import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; import jakarta.ws.rs.*; @@ -489,9 +486,10 @@ public Response updateFileMetadata(@Context 
ContainerRequestContext crc, @FormDa public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @QueryParam("returnDatasetVersion") boolean returnDatasetVersion, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> getFileDataResponse(req, fileIdOrPersistentId, DS_VERSION_LATEST, includeDeaccessioned, uriInfo, headers), getRequestUser(crc)); + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, DS_VERSION_LATEST, includeDeaccessioned, returnDatasetVersion, uriInfo, headers), getRequestUser(crc)); } @GET @@ -501,15 +499,17 @@ public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @QueryParam("returnDatasetVersion") boolean returnDatasetVersion, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> getFileDataResponse(req, fileIdOrPersistentId, datasetVersionId, includeDeaccessioned, uriInfo, headers), getRequestUser(crc)); + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, datasetVersionId, includeDeaccessioned, returnDatasetVersion, uriInfo, headers), getRequestUser(crc)); } private Response getFileDataResponse(final DataverseRequest req, String fileIdOrPersistentId, String datasetVersionId, boolean includeDeaccessioned, + boolean returnDatasetVersion, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { final DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); @@ -546,7 +546,7 @@ public Command handleLatestPublished() { return Response.ok(Json.createObjectBuilder() .add("status", ApiConstants.STATUS_OK) - .add("data", json(fileMetadata)).build()) + .add("data", json(fileMetadata, returnDatasetVersion)).build()) .type(MediaType.APPLICATION_JSON) .build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 93e214159cf..df93727a666 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -602,28 +602,38 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { */ public static JsonObjectBuilder json(FileMetadata fmd) { - return jsonObjectBuilder() + return json(fmd, false); + } + + public static JsonObjectBuilder json(FileMetadata fmd, boolean printDatasetVersion) { + NullSafeJsonBuilder builder = jsonObjectBuilder() // deprecated: .add("category", fmd.getCategory()) - // TODO: uh, figure out what to do here... it's deprecated - // in a sense that there's no longer the category field in the - // fileMetadata object; but there are now multiple, oneToMany file + // TODO: uh, figure out what to do here... it's deprecated + // in a sense that there's no longer the category field in the + // fileMetadata object; but there are now multiple, oneToMany file // categories - and we probably need to export them too!) -- L.A. 
4.5 - // DONE: catgegories by name + // DONE: catgegories by name .add("description", fmd.getDescription()) .add("label", fmd.getLabel()) // "label" is the filename - .add("restricted", fmd.isRestricted()) + .add("restricted", fmd.isRestricted()) .add("directoryLabel", fmd.getDirectoryLabel()) .add("version", fmd.getVersion()) .add("datasetVersionId", fmd.getDatasetVersion().getId()) .add("categories", getFileCategories(fmd)) .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd, false)); + + if (printDatasetVersion) { + builder.add("datasetVersion", json(fmd.getDatasetVersion(), false)); + } + + return builder; } - public static JsonObjectBuilder json(AuxiliaryFile auxFile) { + public static JsonObjectBuilder json(AuxiliaryFile auxFile) { return jsonObjectBuilder() .add("formatTag", auxFile.getFormatTag()) .add("formatVersion", auxFile.getFormatVersion()) // "label" is the filename - .add("origin", auxFile.getOrigin()) + .add("origin", auxFile.getOrigin()) .add("isPublic", auxFile.getIsPublic()) .add("type", auxFile.getType()) .add("contentType", auxFile.getContentType()) @@ -631,6 +641,7 @@ public static JsonObjectBuilder json(AuxiliaryFile auxFile) { .add("checksum", auxFile.getChecksum()) .add("dataFile", JsonPrinter.json(auxFile.getDataFile())); } + public static JsonObjectBuilder json(DataFile df) { return JsonPrinter.json(df, null, false); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index d436b4129c4..125240b76b7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1530,65 +1530,65 @@ public void testGetFileInfo() { deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // Superuser should get to see file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameSecondUpdate)) .statusCode(OK.getStatusCode()); // Superuser should get to see file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameSecondUpdate)) .statusCode(OK.getStatusCode()); // Superuser should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is false - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, false); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, false, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Superuser should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is false - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, false); + getFileDataResponse = 
UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, false, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Superuser should get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", true); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameSecondUpdate)) .statusCode(OK.getStatusCode()); // Superuser should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is false - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", false); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, "3.0", false, false); getFileDataResponse.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Regular user should get to see version 2.0 file data if the latest version is deaccessioned filtering by latest published and includeDeaccessioned is false - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, false); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, false, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Regular user should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", true, false); getFileDataResponse.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // Regular user should not get to see file data from specific deaccessioned version filtering by tag and includeDeaccessioned is false - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", false); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "3.0", false, false); getFileDataResponse.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); @@ -1600,25 +1600,25 @@ public void testGetFileInfo() { updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); // Superuser should get to see draft file data if draft exists filtering by latest and 
includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameThirdUpdate)) .statusCode(OK.getStatusCode()); // Superuser should get to see latest published file data if draft exists filtering by latest published and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST_PUBLISHED, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameSecondUpdate)) .statusCode(OK.getStatusCode()); // Regular user should get to see version 2.0 file data if the latest version is deaccessioned and draft exists filtering by latest and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); // Regular user should get to see version 2.0 file data if the latest version is deaccessioned and draft exists filtering by latest published and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameFirstUpdate)) .statusCode(OK.getStatusCode()); @@ -1629,17 +1629,30 @@ public void testGetFileInfo() { .statusCode(OK.getStatusCode()); // Regular user should get to see file data if the latest version is not deaccessioned filtering by latest and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameThirdUpdate)) .statusCode(OK.getStatusCode()); // Regular user should get to see file data if the latest version is not deaccessioned filtering by latest published and includeDeaccessioned is true - getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true); + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, DS_VERSION_LATEST_PUBLISHED, true, false); getFileDataResponse.then().assertThat() .body("data.label", equalTo(newFileNameThirdUpdate)) .statusCode(OK.getStatusCode()); + // The following tests cover cases where the user requests to include the dataset version information in the response + // User should get to see dataset version info in the response if returnDatasetVersion is true + getFileDataResponse = UtilIT.getFileData(dataFileId, regularApiToken, "1.0", false, true); + getFileDataResponse.then().assertThat() + .body("data.datasetVersion.versionState", equalTo("RELEASED")) + .statusCode(OK.getStatusCode()); + + // User should not get to see dataset version info in the response if returnDatasetVersion is false + getFileDataResponse = UtilIT.getFileData(dataFileId, 
regularApiToken, "1.0", false, false); + getFileDataResponse.then().assertThat() + .body("data.datasetVersion", equalTo(null)) + .statusCode(OK.getStatusCode()); + // Cleanup Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, superUserApiToken); destroyDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 410401514b1..9d728688f5f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1092,13 +1092,14 @@ static Response getFileData(String fileId, String apiToken) { } static Response getFileData(String fileId, String apiToken, String datasetVersionId) { - return getFileData(fileId, apiToken, datasetVersionId, false); + return getFileData(fileId, apiToken, datasetVersionId, false, false); } - static Response getFileData(String fileId, String apiToken, String datasetVersionId, boolean includeDeaccessioned) { + static Response getFileData(String fileId, String apiToken, String datasetVersionId, boolean includeDeaccessioned, boolean returnDatasetVersion) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .queryParam("includeDeaccessioned", includeDeaccessioned) + .queryParam("returnDatasetVersion", returnDatasetVersion) .get("/api/files/" + fileId + "/versions/" + datasetVersionId); } From d4eedc2288f35a8be8ce25f8e48d20fac85aecdb Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 12:01:00 +0000 Subject: [PATCH 0703/1112] Added: extended docs for Get JSON Representation of a File --- doc/sphinx-guides/source/api/native-api.rst | 38 +++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4038ec4340d..3a0731d8c3f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2724,6 +2724,8 @@ Get JSON Representation of a File .. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. +This endpoint returns the file metadata present in the latest dataset version. + Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB*: .. code-block:: bash @@ -2790,6 +2792,42 @@ The fully expanded example above (without environment variables) looks like this The file id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``). +By default, files from deaccessioned dataset versions are not included in the search. If no accessible dataset draft version exists, the search of the latest published file will ignore dataset deaccessioned versions unless ``includeDeaccessioned`` query parameter is set to ``true``. + +Usage example: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB&includeDeaccessioned=true" + +If you want to include the dataset version of the file in the response, there is an optional parameter for this called ``returnDatasetVersion`` whose default value is ``false``. + +Usage example: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER&returnDatasetVersion=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB&returnDatasetVersion=true" + Adding Files ~~~~~~~~~~~~ From ab60747d339aeef63cb4100d4407deec30f7aef5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 12:36:34 +0000 Subject: [PATCH 0704/1112] Added: docs for Get JSON Representation of a File given a Dataset Version --- doc/sphinx-guides/source/api/native-api.rst | 64 +++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 3a0731d8c3f..3d33be1ca45 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2828,6 +2828,70 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB&returnDatasetVersion=true" +Get JSON Representation of a File given a Dataset Version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. + +This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other style listed under :ref:`dataset-version-specifiers`. + +Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* present in the published dataset version ``1.0``: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export DATASET_VERSION=1.0 + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/versions/$DATASET_VERSION?persistentId=$PERSISTENT_IDENTIFIER" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/versions/1.0?persistentId=doi:10.5072/FK2/J8SJZB" + +You may obtain a not found error depending on whether or not the specified version exists or you have permission to view it. 
+ +By default, files from deaccessioned dataset versions are not included in the search unless ``includeDeaccessioned`` query parameter is set to ``true``. + +Usage example: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export DATASET_VERSION=:latest-published + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/versions/$DATASET_VERSION?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/versions/:latest-published?persistentId=doi:10.5072/FK2/J8SJZB&includeDeaccessioned=true" + +If you want to include the dataset version of the file in the response, there is an optional parameter for this called ``returnDatasetVersion`` whose default value is ``false``. + +Usage example: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export DATASET_VERSION=:draft + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/versions/$DATASET_VERSION?persistentId=$PERSISTENT_IDENTIFIER&returnDatasetVersion=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB&returnDatasetVersion=true" + Adding Files ~~~~~~~~~~~~ From e5dbfa1510950bc8b0cb37bbf226cff0722938c8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 19 Feb 2024 12:44:28 +0000 Subject: [PATCH 0705/1112] Added: release notes for #10280 --- doc/release-notes/10280-get-file-api-extension.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 doc/release-notes/10280-get-file-api-extension.md diff --git a/doc/release-notes/10280-get-file-api-extension.md b/doc/release-notes/10280-get-file-api-extension.md new file mode 100644 index 00000000000..fcca0afd78b --- /dev/null +++ b/doc/release-notes/10280-get-file-api-extension.md @@ -0,0 +1,8 @@ +The API endpoint `api/files/{id}` has been extended to support the following optional query parameters: + +- `includeDeaccessioned`: Indicates whether or not to consider deaccessioned dataset versions in the latest file search. (Default: `false`). +- `returnDatasetVersion`: Indicates whether or not to include the dataset version of the file in the response. (Default: `false`). + +A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other available version identifier. + +The endpoint supports the `includeDeaccessioned` and `returnDatasetVersion` optional query parameters, as does the `api/files/{id}` endpoint. 
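For quick reference, both optional query parameters of the extended endpoint can be combined in a single request. A minimal sketch, using a hypothetical file id of 24 and the same placeholder server/token values as the documented examples (this is an illustration, not an additional documented feature):

.. code-block:: bash

  export SERVER_URL=https://demo.dataverse.org
  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/24/versions/:latest?includeDeaccessioned=true&returnDatasetVersion=true"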
From 084fa3219a7bbe609f6180eba50f380d9a450247 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 08:25:48 +0100 Subject: [PATCH 0706/1112] chore(test): remove leftover JUnit 4 rules --- src/test/java/org/junit/rules/TestRule.java | 11 ----------- src/test/java/org/junit/runners/model/Statement.java | 11 ----------- 2 files changed, 22 deletions(-) delete mode 100644 src/test/java/org/junit/rules/TestRule.java delete mode 100644 src/test/java/org/junit/runners/model/Statement.java diff --git a/src/test/java/org/junit/rules/TestRule.java b/src/test/java/org/junit/rules/TestRule.java deleted file mode 100644 index 4f94d8e6922..00000000000 --- a/src/test/java/org/junit/rules/TestRule.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.junit.rules; - -/** - * "Fake" class used as a replacement for Junit4-dependent classes. - * See more at: - * GenericContainer run from Jupiter tests shouldn't require JUnit 4.x library on runtime classpath - * . - */ -@SuppressWarnings("unused") -public interface TestRule { -} diff --git a/src/test/java/org/junit/runners/model/Statement.java b/src/test/java/org/junit/runners/model/Statement.java deleted file mode 100644 index b80ca0abc86..00000000000 --- a/src/test/java/org/junit/runners/model/Statement.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.junit.runners.model; - -/** - * "Fake" class used as a replacement for Junit4-dependent classes. - * See more at: - * GenericContainer run from Jupiter tests shouldn't require JUnit 4.x library on runtime classpath - * . - */ -@SuppressWarnings("unused") -public class Statement { -} From 4d3904f66f20bc78a0fba718557543fa694280a2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 08:30:15 +0100 Subject: [PATCH 0707/1112] test(mail): verify SMTP over SSL/TLS works Adding an integration test with self-signed certificates to enable verification SMTP over SSL works. 
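For reference, a self-signed certificate/key pair like the ones added under src/test/resources/mail can be regenerated with OpenSSL. This command is only an illustration of one possible way to produce such test fixtures (subject fields simplified), not part of the change itself:

  openssl req -x509 -newkey rsa:2048 -sha256 -days 3650 -nodes \
    -keyout key.pem -out cert.pem -subj "/CN=example.org"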
--- .../dataverse/util/MailSessionProducerIT.java | 60 +++++++++++++++++++ src/test/resources/mail/cert.pem | 24 ++++++++ src/test/resources/mail/key.pem | 28 +++++++++ 3 files changed, 112 insertions(+) create mode 100644 src/test/resources/mail/cert.pem create mode 100644 src/test/resources/mail/key.pem diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java index 8280578a343..c4893652153 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java @@ -23,6 +23,7 @@ import org.testcontainers.containers.wait.strategy.Wait; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.MountableFile; import java.util.Map; @@ -118,6 +119,65 @@ void createSession() { } + @Nested + @LocalJvmSettings + @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, varArgs = "ssl.enable", value = "true") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, varArgs = "ssl.trust", value = "*") + class WithSSLWithoutAuthentication { + @Container + static GenericContainer maildev = new GenericContainer<>("maildev/maildev:2.1.0") + .withCopyFileToContainer(MountableFile.forClasspathResource("mail/cert.pem"), "/cert.pem") + .withCopyFileToContainer(MountableFile.forClasspathResource("mail/key.pem"), "/key.pem") + .withExposedPorts(PORT_HTTP, PORT_SMTP) + .withEnv(Map.of( + "MAILDEV_INCOMING_SECURE", "true", + "MAILDEV_INCOMING_CERT", "/cert.pem", + "MAILDEV_INCOMING_KEY", "/key.pem" + )) + .waitingFor(Wait.forHttp("/")); + + static String tcSmtpHost() { + return maildev.getHost(); + } + + static String tcSmtpPort() { + return maildev.getMappedPort(PORT_SMTP).toString(); + } + + @BeforeAll + static void setup() { + RestAssured.baseURI = "http://" + tcSmtpHost(); + RestAssured.port = maildev.getMappedPort(PORT_HTTP); + } + + @Test + void createSession() { + given().when().get("/email") + .then() + .statusCode(200) + .body("size()", is(0)); + + // given + Session session = new MailSessionProducer().getSession(); + MailServiceBean mailer = new MailServiceBean(session, settingsServiceBean); + + // when + boolean sent = mailer.sendSystemEmail("test@example.org", "Test", "Test", false); + + // then + assertTrue(sent); + //RestAssured.get("/email").body().prettyPrint(); + given().when().get("/email") + .then() + .statusCode(200) + .body("size()", is(1)) + .body("[0].subject", equalTo("Test")); + } + + } + static final String username = "testuser"; static final String password = "supersecret"; diff --git a/src/test/resources/mail/cert.pem b/src/test/resources/mail/cert.pem new file mode 100644 index 00000000000..6115183d413 --- /dev/null +++ b/src/test/resources/mail/cert.pem @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIIEFTCCAv0CFAIjr/AvBVg4EX5/rk5+eFdfsquOMA0GCSqGSIb3DQEBCwUAMIHG +MQswCQYDVQQGEwJEVjEaMBgGA1UECAwRRGF0YXZlcnNlIENvdW50cnkxFzAVBgNV +BAcMDkRhdGF2ZXJzZSBDaXR5MS4wLAYDVQQKDCVHbG9iYWwgRGF0YXZlcnNlIENv +bW11bml0eSBDb25zb3J0aXVtMRswGQYDVQQLDBJUZXN0aW5nIERlcGFydG1lbnQx +FDASBgNVBAMMC2V4YW1wbGUub3JnMR8wHQYJKoZIhvcNAQkBFhB0ZXN0QGV4YW1w +bGUub3JnMB4XDTI0MDIyMDA3MTkxOVoXDTM0MDIxNzA3MTkxOVowgcYxCzAJBgNV +BAYTAkRWMRowGAYDVQQIDBFEYXRhdmVyc2UgQ291bnRyeTEXMBUGA1UEBwwORGF0 
+YXZlcnNlIENpdHkxLjAsBgNVBAoMJUdsb2JhbCBEYXRhdmVyc2UgQ29tbXVuaXR5 +IENvbnNvcnRpdW0xGzAZBgNVBAsMElRlc3RpbmcgRGVwYXJ0bWVudDEUMBIGA1UE +AwwLZXhhbXBsZS5vcmcxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhhbXBsZS5vcmcw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCzQ55QKM/sVJMb9c5MKtc/ +YW3+MlCrCnGlo42DCjl6noZg8Gji4dOEMo29UcRtYqhOsx7HOXZ5ulj3YKiBfzht ++QV/ZofhMIN9F/N5XCi4MRPorFz+mPck5NDzH1SqYn5zGm5APPqFJlwBWxDKEfqe +6ir5gG91MzHHuJJSQq3nrSDq+/DXRwg/7L2O7da6pBqti7nYU0T5ql88nddkRhR8 +7NdeZndI+UVmkcnal/3ZpybW8ZNzpiP8nCJO3ASz9kXRC3cITS0zgKxl6USDZs+8 +NAM6R0r8icB89L+i8bOfbyU7nkN9T+xUTTOmalSmsYrMIedIBmcB7NuqbXPLEpeJ +AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAA4U/uhswbeJB0gX4vfVqYf30A131Rvu +J4eaVrVLzuByP1R0MvbBCMMYZBlDVDhiFqRh4KdoVWBvTfxf/4McYZ1FhXkgRlOb +mv/mxVBqnXEu5msviApYmoLzMqgd91F3T4CWs66QIWVTJYh2McRKLG0+IfGp3aox +YKC/W2RPsUO2fKFnUDkYetXMuWg1KJYKuqE6u2lcoV3uHFphXplClnlwN+IwtWWY +cgfNBBRpwx6RXTk2XXgpCKYRBthBu1rowp7qiAwX7R5am6wDx0EIbevfR32bDReX +oAV8c9soJWwAUwH63jqq7KTO8Dg1oGHveZMk4HHGkCqZeGCjbDPaak4= +-----END CERTIFICATE----- diff --git a/src/test/resources/mail/key.pem b/src/test/resources/mail/key.pem new file mode 100644 index 00000000000..84d34efdce8 --- /dev/null +++ b/src/test/resources/mail/key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCzQ55QKM/sVJMb +9c5MKtc/YW3+MlCrCnGlo42DCjl6noZg8Gji4dOEMo29UcRtYqhOsx7HOXZ5ulj3 +YKiBfzht+QV/ZofhMIN9F/N5XCi4MRPorFz+mPck5NDzH1SqYn5zGm5APPqFJlwB +WxDKEfqe6ir5gG91MzHHuJJSQq3nrSDq+/DXRwg/7L2O7da6pBqti7nYU0T5ql88 +nddkRhR87NdeZndI+UVmkcnal/3ZpybW8ZNzpiP8nCJO3ASz9kXRC3cITS0zgKxl +6USDZs+8NAM6R0r8icB89L+i8bOfbyU7nkN9T+xUTTOmalSmsYrMIedIBmcB7Nuq +bXPLEpeJAgMBAAECggEAQ3h3TQ9XVslsRxFIsLVNJ49JoWuZng7DwIai3AfMo4Cn +7jN+HqrFfBO08mUkq9D+rQRQ2MYhd+Zx1sXcFkVmXUnlTlKuYMzsKHiLzIkp0E20 +gxXguHilSI8Qr/kCWlDQ7AyuI2JwHg5WgbIfSxbiP86+FwNGsBNxMI0hEXIEV1ZY +OFXO6AWO63D4zwbwMT30k8cjfyjGvjEtoGmjnBJcrJLSADCIWLcFCw+Cm8vcRkCd +BEpfRzeEos/NVdOqCpi1ea3OkGAY94mXxz6gaFRbeJFj9b6st7oVZLBOiMx1eafH +hgB9JkfVtDogl9B13MkqRN8WAiOgAjIo2Ukq8x1ZkwKBgQD88sdh8k1eldO9UXG1 +BjEsB2mEnzp1hvjuRlMQtnvOjDakbqozzbNQlq9YJxocphLyUPM/BKTsIGp0SPpd +vo0lgspDJ5eLnHd/Xf/guYvKg90NsHZR6V7hf9Z4JcrwrwvXpf7Lp/m95Jwd930j +/kPXw25gRFmpJ8Q9ciIk0PF0NwKBgQC1bUTK8iarZHhDGnR+/AhjkfSnb0z725Qb +w7MYRvicRNWT0wnk3njMMfXYS0rbxw7O5LlSoyCf+n6dGtHqJWCS1+lYuCjCz1vr +hMVFbpcEhob0OAhg8YMgzQRsmeJcBm8slVEOrmmVhQQZPRBjAaQw2f6cjW/ZhzZd +JHSiDw3yPwKBgQDLSleB2Zni3al56v3mzh4w05gzVUFHeX2RCoXx1ad1He1AhAxY +bAakSyaLQ4nR4osxomuMhzAA8iB8araFJwMLVa03AZfjRZIolCR0uMqnrQi42syN +EnEF7JcyorUScKyk2S0JAmxN+HCcCO7TQaPGwbNwvR4OO/6Un6jfS+nySwKBgH6n +4bashkJwyWRPO7TKzjB03I9nLB9Hk4YugQEZysWNaGzij62vgjVLS43MQl5cAQJ+ +usHuEACfJ3UWHCWSInFhOg4twob9q/YnonBuXA9UuzITTAYhlKF5fvUyGMyV0VcW +hpfxOtSfH9Vew+naY32XMiCovMTnmBQ+Nw5L5DiRAoGAV5/JT4z57Y+8npBCRr1m +NJZBXjQ8rmjYBCs+jOQ48wK2mEgcgARIgVGgi9MZZ2BUFHPThGS1o4OYE+fdqD95 +bvg1XInVpNwebLP6UZa9xZ8oGd3Auxfsav1WJB+CZo2tOX5Qt+GnwiumEr3Dlf1d +UVXDNM5A/sl1IDL3T3IEdSw= +-----END PRIVATE KEY----- From 53e964ae68b227793cde00774108adeef586eebb Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 08:30:50 +0100 Subject: [PATCH 0708/1112] style(mail): update deprecation tags for DV v6.2 --- .../harvard/iq/dataverse/settings/SettingsServiceBean.java | 2 +- .../edu/harvard/iq/dataverse/util/MailSessionProducer.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 45189ac6c3a..63566b62395 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -232,7 +232,7 @@ public enum Key { * @deprecated Please replace usages with {@link edu.harvard.iq.dataverse.MailServiceBean#getSystemAddress}, * which is backward compatible with this setting. */ - @Deprecated(since = "6.1", forRemoval = true) + @Deprecated(since = "6.2", forRemoval = true) SystemEmail, /* size limit for Tabular data file ingests */ /* (can be set separately for specific ingestable formats; in which diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java index 25f5970274e..13fedb94014 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java @@ -49,7 +49,7 @@ public class MailSessionProducer { * No direct JNDI lookup on the field to avoid deployment failures when not present. * @deprecated This should be removed with the next major release of Dataverse, as it would be a breaking change. */ - @Deprecated(forRemoval = true, since = "6.1") + @Deprecated(forRemoval = true, since = "6.2") Session appserverProvidedSession; public MailSessionProducer() { @@ -124,7 +124,7 @@ Properties getMailProperties() { * @return True if injected as resource from app server, false otherwise * @deprecated This is supposed to be removed when {@link #appserverProvidedSession} is removed. */ - @Deprecated(forRemoval = true, since = "6.1") + @Deprecated(forRemoval = true, since = "6.2") public boolean hasSessionFromAppServer() { return this.appserverProvidedSession != null; } From abcb131e79bd7f16ee8e003b8d7bf2e33f3e0259 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 08:32:11 +0100 Subject: [PATCH 0709/1112] style(settings): ignore SonarCube rule S115 for DB settings The DB settings names are not compliant with usual Java enum name rules. Ignoring to avoid unnecessary clutter, hiding more important problems. --- .../edu/harvard/iq/dataverse/settings/SettingsServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 63566b62395..864307d536f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -45,6 +45,7 @@ public class SettingsServiceBean { * over your shoulder when typing strings in various places of a large app. * So there. */ + @SuppressWarnings("java:S115") public enum Key { AllowApiTokenLookupViaApi, /** From b0d268d281331549f5c8b9ef17f760131686d079 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 10:39:06 +0100 Subject: [PATCH 0710/1112] doc(settings): add section on secure password storage in security section The section about securing your installation was missing hints about how to store and access passwords in a safe manner. Now having a single place to reference from everywhere makes the config bits for passwords much more readable, as we do not need to provide as many examples. 
--- .../source/installation/config.rst | 121 ++++++++++-------- 1 file changed, 71 insertions(+), 50 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index c233e594fa7..32c61009524 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -88,6 +88,51 @@ See the :ref:`payara` section of :doc:`prerequisites` for details and init scrip Related to this is that you should remove ``/root/.payara/pass`` to ensure that Payara isn't ever accidentally started as root. Without the password, Payara won't be able to start as root, which is a good thing. +.. _secure-password-storage: + +Secure Password Storage +^^^^^^^^^^^^^^^^^^^^^^^ + +In development or demo scenarios, we suggest not to store passwords in files permanently. +We recommend the use of at least environment variables or production-grade mechanisms to supply passwords. + +In a production setup, permanently storing passwords as plaintext should be avoided at all costs. +Environment variables are dangerous in shared environments and containers, as they may be easily exploited; we suggest not to use them. +Depending on your deployment model and environment, you can make use of the following techniques to securely store and access passwords. + +**Password Aliases** + +A `password alias`_ allows you to have a plaintext reference to an encrypted password stored on the server, with the alias being used wherever the password is needed. +This method is especially useful in a classic deployment, as it does not require any external secrets management. + +Password aliases are consumable as a MicroProfile Config source and can be referenced by their name in a `property expression`_. +You may also reference them within a `variable substitution`_, e.g. in your ``domain.xml``. + +Creation example for an alias named *my.alias.name*: + +.. code-block:: shell + + echo "AS_ADMIN_ALIASPASSWORD=changeme" > /tmp/p.txt + asadmin create-password-alias --passwordfile "/tmp/p.txt" "my.alias.name" + rm /tmp/p.txt + +Note: omitting the ``--passwordfile`` parameter allows creating the alias in an interactive fashion with a prompt. + +**Secrets Files** + +Payara has a built-in MicroProfile Config source to consume values from files in a directory on your filesystem. +This `directory config source`_ is most useful and secure with external secrets management in place, temporarily mounting cleartext passwords as files. +Examples are Kubernetes / OpenShift `Secrets `_ or tools like `Vault Agent `_. + +Please follow the `directory config source`_ documentation to learn about its usage. + +**Cloud Providers** + +Running Dataverse on a cloud platform or running an external secret management system like `Vault `_ enables accessing secrets without any intermediate storage of cleartext. +Obviously this is the most secure option for any deployment model, but it may require more resources to set up and maintain - your mileage may vary. + +Take a look at `cloud sources`_ shipped with Payara to learn about their usage. Enforce Strong Passwords for User Accounts ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -365,16 +410,8 @@ Basic Database Settings 1. Any of these settings can be set via system properties (see :ref:`jvm-options` starting at :ref:`dataverse.db.name`), environment variables or other MicroProfile Config mechanisms supported by the app server. `See Payara docs for supported sources `_. -2. Remember to protect your secrets. 
For passwords, use an environment variable (bare minimum), a password alias named the same - as the key (OK) or use the "dir config source" of Payara (best). - - Alias creation example: - - .. code-block:: shell - - echo "AS_ADMIN_ALIASPASSWORD=changeme" > /tmp/p.txt - asadmin create-password-alias --passwordfile /tmp/p.txt dataverse.db.password - rm /tmp/p.txt +2. Remember to protect your secrets. + See :ref:`secure-password-storage` for more information. 3. Environment variables follow the key, replacing any dot, colon, dash, etc. into an underscore "_" and all uppercase letters. Example: ``dataverse.db.host`` -> ``DATAVERSE_DB_HOST`` @@ -603,6 +640,8 @@ Then create a password alias by running (without changes): The second command will trigger an interactive prompt asking you to input your Swift password. +Note: you may choose a different way to secure this password, depending on your use case. See :ref:`secure-password-storage` for more options. + Second, update the JVM option ``dataverse.files.storage-driver-id`` by running the delete command: ``./asadmin $ASADMIN_OPTS delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"`` @@ -872,9 +911,8 @@ Optionally, you may provide static credentials for each S3 storage using MicroPr You may provide the values for these via any `supported MicroProfile Config API source`_. **WARNING:** - *For security, do not use the sources "environment variable" or "system property" (JVM option) in a production context!* -*Rely on password alias, secrets directory or cloud based sources instead!* +*Rely on password alias, secrets directory or cloud based sources as described at* :ref:`secure-password-storage` *instead!* **NOTE:** @@ -1946,15 +1984,9 @@ dataverse.db.password The PostgreSQL users password to connect with. -Preferrably use a JVM alias, as passwords in environment variables aren't safe. - -.. code-block:: shell - - echo "AS_ADMIN_ALIASPASSWORD=change-me-super-secret" > /tmp/password.txt - asadmin create-password-alias --passwordfile /tmp/password.txt dataverse.db.password - rm /tmp/password.txt +See :ref:`secure-password-storage` to learn about options to securely store this password. -Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_DB_PASSWORD``. +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_DB_PASSWORD`` (although you shouldn't use environment variables for passwords). dataverse.db.host +++++++++++++++++ @@ -2201,14 +2233,7 @@ Once you have a username from DataCite, you can enter it like this: dataverse.pid.datacite.password +++++++++++++++++++++++++++++++ -Once you have a password from your provider, you should create a password alias. -This avoids storing it in clear text, although you could use a JVM option `to reference -a different place `__. - -``./asadmin create-password-alias dataverse.pid.datacite.password`` - -It will allow you to enter the password while not echoing the characters. -To manage these, read up on `Payara docs about password aliases `__. +Once you have a password from your provider, you should create a password alias called *dataverse.pid.datacite.password* or use another method described at :ref:`secure-password-storage` to safeguard it. **Notes:** @@ -2219,7 +2244,7 @@ To manage these, read up on `Payara docs about password aliases `. Provide a passphrase to decrypt the :ref:`private key file `. +See :ref:`secure-password-storage` for ways to do this securely. 
The key file may (and should) be encrypted with a passphrase (used for encryption with AES-128). See also chapter 1.4 "Authentication" of the @@ -2260,10 +2286,10 @@ encryption with AES-128). See also chapter 1.4 "Authentication" of the Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_PID_HANDLENET_KEY_PASSPHRASE`` (although you shouldn't use -environment variables for passwords). This setting was formerly known as -``dataverse.handlenet.admprivphrase`` and has been renamed. You should delete -the old JVM option and the wrapped password alias, then recreate as shown for -:ref:`dataverse.pid.datacite.password` but with this option as alias name. +environment variables for passwords). + +This setting was formerly known as ``dataverse.handlenet.admprivphrase`` and has been renamed. +You should delete the old JVM option and the wrapped password alias, then recreate as shown for :ref:`dataverse.pid.datacite.password` but with this option as alias name. .. _dataverse.pid.handlenet.index: @@ -2457,20 +2483,11 @@ The key used to sign a URL is created from the API token of the creating user pl signature-secret makes it impossible for someone who knows an API token from forging signed URLs and provides extra security by making the overall signing key longer. -Since the signature-secret is sensitive, you should treat it like a password. Here is an example how to set your shared secret -with the secure method "password alias": +**WARNING**: +*Since the signature-secret is sensitive, you should treat it like a password.* +*See* :ref:`secure-password-storage` *to learn about ways to safeguard it.* -.. code-block:: shell - - echo "AS_ADMIN_ALIASPASSWORD=change-me-super-secret" > /tmp/password.txt - asadmin create-password-alias --passwordfile /tmp/password.txt dataverse.api.signature-secret - rm /tmp/password.txt - -Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable -``DATAVERSE_API_SIGNATURE_SECRET``. - -**WARNING:** For security, do not use the sources "environment variable" or "system property" (JVM option) in a -production context! Rely on password alias, secrets directory or cloud based sources instead! +Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_API_SIGNATURE_SECRET`` (although you shouldn't use environment variables for passwords) . .. _dataverse.api.allow-incomplete-metadata: @@ -4147,10 +4164,7 @@ A true(default)/false option determining whether datafiles listed on the dataset :AllowUserManagementOfOrder +++++++++++++++++++++++++++ -A true/false (default) option determining whether the dataset datafile table display includes checkboxes enabling users to turn folder ordering and/or category ordering (if an order is defined by :CategoryOrder) on and off dynamically. - -.. _supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html - +A true/false (default) option determining whether the dataset datafile table display includes checkboxes enabling users to turn folder ordering and/or category ordering (if an order is defined by :CategoryOrder) on and off dynamically. .. _:UseStorageQuotas: @@ -4173,3 +4187,10 @@ tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed file on the fly. The setting is ``false`` by default, preserving the legacy behavior. + +.. 
_supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html +.. _password alias: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Server%20Configuration%20And%20Management/Configuration%20Options/Password%20Aliases.html +.. _variable substitution: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Server%20Configuration%20And%20Management/Configuration%20Options/Variable%20Substitution/Usage%20of%20Variables.html +.. _property expression: https://download.eclipse.org/microprofile/microprofile-config-3.1/microprofile-config-spec-3.1.html#property-expressions +.. _directory config source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Directory.html +.. _cloud sources: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Cloud/Overview.html \ No newline at end of file From ffd69e5cbf7f90f921e9d386c04fe3cfa062104e Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 20 Feb 2024 12:59:48 +0000 Subject: [PATCH 0711/1112] Added: #10280 release note tweak --- doc/release-notes/10280-get-file-api-extension.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/10280-get-file-api-extension.md b/doc/release-notes/10280-get-file-api-extension.md index fcca0afd78b..7ed70e93dc9 100644 --- a/doc/release-notes/10280-get-file-api-extension.md +++ b/doc/release-notes/10280-get-file-api-extension.md @@ -6,3 +6,5 @@ The API endpoint `api/files/{id}` has been extended to support the following opt A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other available version identifier. The endpoint supports the `includeDeaccessioned` and `returnDatasetVersion` optional query parameters, as does the `api/files/{id}` endpoint. + +`api/files/{id}/draft` endpoint is no longer available in favor of the new endpoint `api/files/{id}/versions/{datasetVersionId}`, which can use the version identifier ``:draft`` (`api/files/{id}/versions/:draft`) to obtain the same result. 
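To illustrate the release note above, a hypothetical call against the new endpoint could look as follows; ``demo.dataverse.org``, the file id ``24`` and the API token are placeholder values and not part of the original note:

.. code-block:: shell

   export SERVER_URL=https://demo.dataverse.org
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export ID=24

   # Replaces the retired /api/files/$ID/draft call:
   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/versions/:draft"

   # Any other version identifier works the same way, e.g. the latest published version:
   curl "$SERVER_URL/api/files/$ID/versions/:latest-published"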
From 7ff5d6a35647cb76d9fe98279dd9021952a9849b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 08:49:25 -0500 Subject: [PATCH 0712/1112] adding to test --- .../edu/harvard/iq/dataverse/util/json/JsonParser.java | 8 +++++--- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 7 +++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index ac7b6bb4067..4287cab069b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -70,6 +70,7 @@ public class JsonParser { SettingsServiceBean settingsService; LicenseServiceBean licenseService; HarvestingClient harvestingClient = null; + boolean allowHarvestingMissingCVV = false; /** * if lenient, we will accept alternate spellings for controlled vocabulary values @@ -93,6 +94,8 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB this.settingsService = settingsService; this.licenseService = licenseService; this.harvestingClient = harvestingClient; + this.allowHarvestingMissingCVV = harvestingClient != null && + settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false); } public JsonParser() { @@ -739,10 +742,9 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar ret.setDatasetFieldType(type); // If Harvesting, CVV values may differ between the Dataverse installations, so we won't enforce them - if (harvestingClient != null && type.isControlledVocabulary() && - settingsService.isTrueForKey(SettingsServiceBean.Key.AllowHarvestingMissingCVV, false)) { + if (allowHarvestingMissingCVV && type.isControlledVocabulary()) { type.setAllowControlledVocabulary(false); - logger.warning("Harvesting: Skipping Controlled Vocabulary. Treating values as primitives"); + logger.info("Harvesting: Skipping Controlled Vocabulary. Treating values as primitives"); } if (type.isCompound()) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 9b83c4c1c9a..375eb92a6ab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -3,8 +3,7 @@ import java.util.logging.Logger; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.*; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -19,7 +18,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.jupiter.api.BeforeAll; /** * This class tests Harvesting Client functionality. @@ -29,6 +27,7 @@ * /api/harvest/clients/ api to run an actual harvest of a control set and * then validate the resulting harvested content. 
*/ +@TestMethodOrder(MethodOrderer.MethodName.class) public class HarvestingClientsIT { private static final Logger logger = Logger.getLogger(HarvestingClientsIT.class.getCanonicalName()); @@ -178,7 +177,7 @@ public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws In } @Test public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { - harvestingClientRun(false); + harvestingClientRun(true); } private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { From f690c47100f216810133ca308ea147dc6ad93ee1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 15:43:07 +0100 Subject: [PATCH 0713/1112] feat(installer): make installer use new way to apply mail MTA config Instead of setting a DB setting, we now simply apply system properties. Also, aligned with the way the "from" address is now bound to be the system mail address, this commit removes this subtle difference in the installer as well. --- scripts/installer/as-setup.sh | 16 ++++++++++------ scripts/installer/install.py | 8 -------- scripts/installer/installAppServer.py | 5 +++-- 3 files changed, 13 insertions(+), 16 deletions(-) diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index fc5b378cff5..f169dfa5333 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -146,12 +146,10 @@ function final_setup(){ # delete any existing mail/notifyMailSession; configure port, if provided: ./asadmin delete-javamail-resource mail/notifyMailSession - - if [ $SMTP_SERVER_PORT"x" != "x" ] - then - ./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" --property mail.smtp.port="${SMTP_SERVER_PORT}" mail/notifyMailSession - else - ./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession + ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.system-email='${ADMIN_EMAIL}'" + ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.mta.host='${SMTP_SERVER}'" + if [ "x${SMTP_SERVER_PORT}" != "x" ]; then + ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.mta.port='${SMTP_SERVER_PORT}'" fi } @@ -279,6 +277,12 @@ if [ ! -d "$DOMAIN_DIR" ] exit 2 fi +if [ -z "$ADMIN_EMAIL" ] + then + echo "You must specify the system admin email address (ADMIN_EMAIL)." + exit 1 +fi + echo "Setting up your app. server (Payara) to support Dataverse" echo "Payara directory: "$GLASSFISH_ROOT echo "Domain directory: "$DOMAIN_DIR diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 18995695638..2bad29c780e 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -568,14 +568,6 @@ except: sys.exit("Failure to execute setup-all.sh! aborting.") -# 7b. configure admin email in the application settings -print("configuring system email address...") -returnCode = subprocess.call(["curl", "-X", "PUT", "-d", adminEmail, apiUrl+"/admin/settings/:SystemEmail"]) -if returnCode != 0: - print("\nWARNING: failed to configure the admin email in the Dataverse settings!") -else: - print("\ndone.") - # 8c. configure remote Solr location, if specified if solrLocation != "LOCAL": print("configuring remote Solr location... 
("+solrLocation+")") diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py index 698f5ba9a58..7636490c583 100644 --- a/scripts/installer/installAppServer.py +++ b/scripts/installer/installAppServer.py @@ -6,8 +6,9 @@ def runAsadminScript(config): # commands to set up all the app. server (payara6) components for the application. # All the parameters must be passed to that script as environmental # variables: - os.environ['GLASSFISH_DOMAIN'] = "domain1"; - os.environ['ASADMIN_OPTS'] = ""; + os.environ['GLASSFISH_DOMAIN'] = "domain1" + os.environ['ASADMIN_OPTS'] = "" + os.environ['ADMIN_EMAIL'] = config.get('system','ADMIN_EMAIL') os.environ['HOST_ADDRESS'] = config.get('glassfish','HOST_DNS_ADDRESS') os.environ['GLASSFISH_ROOT'] = config.get('glassfish','GLASSFISH_DIRECTORY') From 98244256529959a37b98986226ad71f4cc2b9bcf Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 15:46:12 +0100 Subject: [PATCH 0714/1112] doc(mail): add mail config paragraphs #7424 --- .../source/installation/config.rst | 154 ++++++++++++++++-- 1 file changed, 142 insertions(+), 12 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 32c61009524..1d23f9a1277 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2520,13 +2520,37 @@ See :ref:`discovery-sign-posting` for details. Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_SIGNPOSTING_LEVEL1_ITEM_LIMIT``. +.. _systemEmail: +.. _dataverse.mail.system-email: + +dataverse.mail.system-email ++++++++++++++++++++++++++++ + +This is the email address that "system" emails are sent from such as password reset links, notifications, etc. +It replaces the database setting :ref:`legacySystemEmail` since Dataverse 6.2. + +**WARNING**: Your Dataverse installation will not send mail without this setting in place. + +Note that only the email address is required, which you can supply without the ``<`` and ``>`` signs, but if you include the text, it's the way to customize the name of your support team, which appears in the "from" address in emails as well as in help text in the UI. +If you don't include the text, the installation name (see :ref:`Branding Your Installation`) will appear in the "from" address. +In case you want your system email address to of no-reply style, have a look at :ref:`dataverse.mail.support-email` setting, too. + +Please note that if you're having any trouble sending email, you can refer to "Troubleshooting" under :doc:`installation-main`. + +Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_SYSTEM_EMAIL``. + +.. _dataverse.mail.support-email: + dataverse.mail.support-email ++++++++++++++++++++++++++++ -This provides an email address distinct from the :ref:`systemEmail` that will be used as the email address for Contact Forms and Feedback API. This address is used as the To address when the Contact form is launched from the Support entry in the top navigation bar and, if configured via :ref:`dataverse.mail.cc-support-on-contact-email`, as a CC address when the form is launched from a Dataverse/Dataset Contact button. -This allows configuration of a no-reply email address for :ref:`systemEmail` while allowing feedback to go to/be cc'd to the support email address, which would normally accept replies. 
If not set, the :ref:`systemEmail` is used for the feedback API/contact form email. +This provides an email address distinct from the :ref:`systemEmail` that will be used as the email address for Contact Forms and Feedback API. +This address is used as the To address when the Contact form is launched from the Support entry in the top navigation bar and, if configured via :ref:`dataverse.mail.cc-support-on-contact-email`, as a CC address when the form is launched from a Dataverse/Dataset Contact button. +This allows configuration of a no-reply email address for :ref:`systemEmail` while allowing feedback to go to/be cc'd to the support email address, which would normally accept replies. +If not set, the :ref:`systemEmail` is used for the feedback API/contact form email. -Note that only the email address is required, which you can supply without the ``<`` and ``>`` signs, but if you include the text, it's the way to customize the name of your support team, which appears in the "from" address in emails as well as in help text in the UI. If you don't include the text, the installation name (see :ref:`Branding Your Installation`) will appear in the "from" address. +Note that only the email address is required, which you can supply without the ``<`` and ``>`` signs, but if you include the text, it's the way to customize the name of your support team, which appears in the "from" address in emails as well as in help text in the UI. +If you don't include the text, the installation name (see :ref:`Branding Your Installation`) will appear in the "from" address. Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_SUPPORT_EMAIL``. @@ -2535,12 +2559,123 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en dataverse.mail.cc-support-on-contact-email ++++++++++++++++++++++++++++++++++++++++++ -If this setting is true, the contact forms and feedback API will cc the system (:SupportEmail if set, :SystemEmail if not) when sending email to the collection, dataset, or datafile contacts. +If this boolean setting is true, the contact forms and feedback API will cc the system (``dataverse.mail.support-email`` if set, ``dataverse.mail.system-email`` if not) when sending email to the collection, dataset, or datafile contacts. A CC line is added to the contact form when this setting is true so that users are aware that the cc will occur. The default is false. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_MAIL_CC_SUPPORT_ON_CONTACT_EMAIL``. +dataverse.mail.debug +++++++++++++++++++++ + +When this boolean setting is true, sending an email will generate more verbose logging, enabling you to analyze mail delivery malfunctions. +Defaults to ``false``. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_MAIL_DEBUG``. + +.. _dataverse.mail.mta: + +dataverse.mail.mta.* +++++++++++++++++++++ + +The following options allow you to configure a target Mail Transfer Agent (MTA) to be used for sending emails to users. +Be advised: as the mail server connection (session) is cached once created, you need to restart Payara when applying configuration changes. + +All can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_MTA_HOST``. +(For environment variables: simply replace "." and "-" with "_" and write as all caps.) 
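As a short example of the naming rule above (a sketch; ``smtp.example.org`` is a placeholder value, and the second option is taken from the paragraphs further up):

.. code-block:: shell

   # dataverse.mail.mta.host                    ->  DATAVERSE_MAIL_MTA_HOST
   # dataverse.mail.cc-support-on-contact-email ->  DATAVERSE_MAIL_CC_SUPPORT_ON_CONTACT_EMAIL
   export DATAVERSE_MAIL_MTA_HOST=smtp.example.org
   export DATAVERSE_MAIL_CC_SUPPORT_ON_CONTACT_EMAIL=true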
+ +The following table describes the most important settings commonly used. + +.. list-table:: + :widths: 15 60 25 + :header-rows: 1 + :align: left + + * - Setting Key + - Description + - Default Value + * - ``dataverse.mail.mta.host`` + - The SMTP server to connect to. + - | *No default* + | (``smtp`` in our :ref:`Dataverse container `) + * - ``dataverse.mail.mta.port`` + - The SMTP server port to connect to. + - ``25`` + * - ``dataverse.mail.mta.auth`` + - If ``true``, attempt to authenticate the user using the AUTH command. + - ``false`` + * - ``dataverse.mail.mta.user`` + - The username to use in an AUTH command. + - *No default* + * - ``dataverse.mail.mta.password`` + - The password to use in an AUTH command. (Might be a token when using XOAUTH2 mechanism) + - *No default* + * - ``dataverse.mail.mta.allow-utf8-addresses`` + - If set to ``true``, UTF-8 strings are allowed in message headers, e.g., in addresses. + This should only be set if the mail server also supports UTF-8. + (Quoted from `Jakarta Mail Javadoc `_) + Setting to ``false`` will also make mail address validation in UI/API fail on UTF-8 chars. + - ``true`` + +**WARNING**: +*For security of your password use only safe ways to store and access it.* +*See* :ref:`secure-password-storage` *to learn about your options.* + +Find below a list of even more options you can use to configure sending mails. +Detailed description for every setting can be found in the table included within the `Jakarta Mail Documentation `_. +(Simply replace ``dataverse.mail.mta.`` with ``mail.smtp.``.) + +* Timeouts: + ``dataverse.mail.mta.connectiontimeout``, + ``dataverse.mail.mta.timeout``, + ``dataverse.mail.mta.writetimeout`` +* SSL/TLS: + ``dataverse.mail.mta.starttls.enable``, + ``dataverse.mail.mta.starttls.required``, + ``dataverse.mail.mta.ssl.enable``, + ``dataverse.mail.mta.ssl.checkserveridentity``, + ``dataverse.mail.mta.ssl.trust``, + ``dataverse.mail.mta.ssl.protocols``, + ``dataverse.mail.mta.ssl.ciphersuites`` +* Proxy Connection: + ``dataverse.mail.mta.proxy.host``, + ``dataverse.mail.mta.proxy.port``, + ``dataverse.mail.mta.proxy.user``, + ``dataverse.mail.mta.proxy.password``, + ``dataverse.mail.mta.socks.host``, + ``dataverse.mail.mta.socks.port`` +* SMTP EHLO command details: + ``dataverse.mail.mta.ehlo``, + ``dataverse.mail.mta.localhost``, + ``dataverse.mail.mta.localaddress``, + ``dataverse.mail.mta.localport`` +* Authentication details: + ``dataverse.mail.mta.auth.mechanisms``, + ``dataverse.mail.mta.auth.login.disable``, + ``dataverse.mail.mta.auth.plain.disable``, + ``dataverse.mail.mta.auth.digest-md5.disable``, + ``dataverse.mail.mta.auth.ntlm.disable``, + ``dataverse.mail.mta.auth.xoauth2.disable``, + ``dataverse.mail.mta.auth.ntlm.domain``, + ``dataverse.mail.mta.auth.ntlm.flag``, + ``dataverse.mail.mta.sasl.enable``, + ``dataverse.mail.mta.sasl.usecanonicalhostname``, + ``dataverse.mail.mta.sasl.mechanisms``, + ``dataverse.mail.mta.sasl.authorizationid``, + ``dataverse.mail.mta.sasl.realm`` +* Miscellaneous: + ``dataverse.mail.mta.allow8bitmime``, + ``dataverse.mail.mta.submitter``, + ``dataverse.mail.mta.dsn.notify``, + ``dataverse.mail.mta.dsn.ret``, + ``dataverse.mail.mta.sendpartial``, + ``dataverse.mail.mta.quitwait``, + ``dataverse.mail.mta.quitonsessionreject``, + ``dataverse.mail.mta.userset``, + ``dataverse.mail.mta.noop.strict``, + ``dataverse.mail.mta.mailextension`` + + dataverse.ui.allow-review-for-incomplete ++++++++++++++++++++++++++++++++++++++++ @@ -2763,18 +2898,13 @@ In Dataverse Software 4.7 and 
lower, the :doc:`/api/search` required an API toke ``curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiRequiresToken`` -.. _systemEmail: +.. _legacySystemEmail: :SystemEmail ++++++++++++ -This is the email address that "system" emails are sent from such as password reset links. Your Dataverse installation will not send mail without this setting in place. - -``curl -X PUT -d 'LibraScholar SWAT Team ' http://localhost:8080/api/admin/settings/:SystemEmail`` - -Note that only the email address is required, which you can supply without the ``<`` and ``>`` signs, but if you include the text, it's the way to customize the name of your support team, which appears in the "from" address in emails as well as in help text in the UI. If you don't include the text, the installation name (see :ref:`Branding Your Installation`) will appear in the "from" address. - -Please note that if you're having any trouble sending email, you can refer to "Troubleshooting" under :doc:`installation-main`. +Please note that this setting is deprecated since Dataverse 6.2. +It will be picked up for backward compatibility, but please migrate to usage of :ref:`dataverse.mail.system-email`. :HomePageCustomizationFile ++++++++++++++++++++++++++ From 5dcaba9ee23179f43d72b91693f2591ee58a6d17 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 15:47:10 +0100 Subject: [PATCH 0715/1112] doc(mail): rewrite install docs to match new way of mail config #7424 --- .../source/installation/installation-main.rst | 51 ++++++------------- 1 file changed, 16 insertions(+), 35 deletions(-) diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 46c1b0b0af3..d9ae650e37a 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -157,49 +157,30 @@ If your mail host requires a username/password for access, continue to the next Mail Host Configuration & Authentication ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you need to alter your mail host address, user, or provide a password to connect with, these settings are easily changed in the Payara admin console or via command line. +If you need to alter your mail host address, user, or provide a password to connect with, these settings are easily changed using JVM options group :ref:`dataverse.mail.mta`. -For the Payara console, load a browser with your domain online, navigate to http://localhost:4848 and on the side panel find JavaMail Sessions. By default, the Dataverse Software uses a session named mail/notifyMailSession for routing outgoing emails. Click this mail session in the window to modify it. +To enable authentication with your mail server, simply configure the following options: -When fine tuning your JavaMail Session, there are a number of fields you can edit. The most important are: +- ``dataverse.mail.mta.auth = true`` +- ``dataverse.mail.mta.username = `` +- ``dataverse.mail.mta.password`` -+ **Mail Host:** Desired mail host’s DNS address (e.g. smtp.gmail.com) -+ **Default User:** Username mail host will recognize (e.g. user\@gmail.com) -+ **Default Sender Address:** Email address that your Dataverse installation will send mail from +**WARNING**: +We strongly recommend not using plaintext storage or environment variables, but relying on :ref:`secure-password-storage`. 
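In line with the warning above, one possible way to keep the SMTP password itself out of plaintext configuration is a password alias named after the option, as described in the secure password storage section (a sketch; the interactive prompt avoids writing the password to a file):

.. code-block:: shell

   # You will be prompted for the password interactively; naming the alias
   # after the option means no further cleartext reference is needed.
   ./asadmin create-password-alias dataverse.mail.mta.password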
-Depending on the SMTP server you're using, you may need to add additional properties at the bottom of the page (below "Advanced"). +**WARNING**: +It’s recommended to use an *app password* (for smtp.gmail.com users) or utilize a dedicated/non-personal user account with SMTP server auths so that you do not risk compromising your password. -From the "Add Properties" utility at the bottom, use the “Add Property” button for each entry you need, and include the name / corresponding value as needed. Descriptions are optional, but can be used for your own organizational needs. +If your installation’s mail host uses SSL (like smtp.gmail.com) you’ll need to configure these options: -**Note:** These properties are just an example. You may need different/more/fewer properties all depending on the SMTP server you’re using. +- ``dataverse.mail.mta.ssl.enable = true`` +- ``dataverse.mail.mta.port = 587`` -============================== ============================== - Name Value -============================== ============================== -mail.smtp.auth true -mail.smtp.password [Default User password*] -mail.smtp.port [Port number to route through] -============================== ============================== +**NOTE**: Some mail providers might still support using port 465, which formerly was assigned to be SMTP over SSL (SMTPS). +However, this is no longer standardized and the port has been reassigned by the IANA to a different service. +If your provider supports using port 587, be advised to migrate your configuration. -**\*WARNING**: Entering a password here will *not* conceal it on-screen. It’s recommended to use an *app password* (for smtp.gmail.com users) or utilize a dedicated/non-personal user account with SMTP server auths so that you do not risk compromising your password. - -If your installation’s mail host uses SSL (like smtp.gmail.com) you’ll need these name/value pair properties in place: - -====================================== ============================== - Name Value -====================================== ============================== -mail.smtp.socketFactory.port 465 -mail.smtp.port 465 -mail.smtp.socketFactory.fallback false -mail.smtp.socketFactory.class javax.net.ssl.SSLSocketFactory -====================================== ============================== - -The mail session can also be set from command line. To use this method, you will need to delete your notifyMailSession and create a new one. See the below example: - -- Delete: ``./asadmin delete-javamail-resource mail/notifyMailSession`` -- Create (remove brackets and replace the variables inside): ``./asadmin create-javamail-resource --mailhost [smtp.gmail.com] --mailuser [test\@test\.com] --fromaddress [test\@test\.com] --property mail.smtp.auth=[true]:mail.smtp.password=[password]:mail.smtp.port=[465]:mail.smtp.socketFactory.port=[465]:mail.smtp.socketFactory.fallback=[false]:mail.smtp.socketFactory.class=[javax.net.ssl.SSLSocketFactory] mail/notifyMailSession`` - -Be sure you save the changes made here and then restart your Payara server to test it out. +As the mail server connection (session) is cached once created, you need to restart Payara when applying configuration changes. 
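Putting the above together, a hypothetical authenticated setup could be applied as JVM system properties, mirroring what ``scripts/installer/as-setup.sh`` does for host and port earlier in this series; host and user below are placeholders, and the option names follow the ``dataverse.mail.mta.*`` reference table in the configuration guide:

.. code-block:: shell

   ./asadmin create-system-properties "dataverse.mail.mta.host='smtp.example.org'"
   ./asadmin create-system-properties "dataverse.mail.mta.port='587'"
   ./asadmin create-system-properties "dataverse.mail.mta.ssl.enable='true'"
   ./asadmin create-system-properties "dataverse.mail.mta.auth='true'"
   ./asadmin create-system-properties "dataverse.mail.mta.user='dataversenotify'"
   # Keep the password itself in a password alias or secrets file, not in a system property.
   # Restart Payara afterwards - the mail session is cached once created.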
UnknownHostException While Deploying ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From a48e860a511906296a9f43d807adc271e80bfb42 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 16:10:16 +0100 Subject: [PATCH 0716/1112] fix(ct): migrate compose and configbaker to use new way of mail config --- docker-compose-dev.yml | 2 ++ modules/container-configbaker/scripts/bootstrap/dev/init.sh | 3 --- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 6eab84092ed..d43fce37bfc 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -14,6 +14,8 @@ services: DATAVERSE_DB_USER: ${DATAVERSE_DB_USER} ENABLE_JDWP: "1" DATAVERSE_FEATURE_API_BEARER_AUTH: "1" + DATAVERSE_MAIL_SYSTEM_EMAIL: "dataverse@localhost" + DATAVERSE_MAIL_MTA_HOST: "smtp" DATAVERSE_AUTH_OIDC_ENABLED: "1" DATAVERSE_AUTH_OIDC_CLIENT_ID: test DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 diff --git a/modules/container-configbaker/scripts/bootstrap/dev/init.sh b/modules/container-configbaker/scripts/bootstrap/dev/init.sh index efdaee3d0c3..f8770436652 100644 --- a/modules/container-configbaker/scripts/bootstrap/dev/init.sh +++ b/modules/container-configbaker/scripts/bootstrap/dev/init.sh @@ -9,9 +9,6 @@ export DATAVERSE_URL echo "Running base setup-all.sh (INSECURE MODE)..." "${BOOTSTRAP_DIR}"/base/setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out -echo "Setting system mail address..." -curl -X PUT -d "dataverse@localhost" "${DATAVERSE_URL}/api/admin/settings/:SystemEmail" - echo "Setting DOI provider to \"FAKE\"..." curl "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" -X PUT -d FAKE From 6f5cc9f761e49edf2ea67caaeaf67dbc6dbde4df Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 16:13:36 +0100 Subject: [PATCH 0717/1112] style(mail): update mail config release note --- doc/release-notes/7424-mailsession.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index 8a3aa3e956b..67e5684e569 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -1,8 +1,10 @@ ## New way to configure mail transfer agent With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure your MTA. -Instead, we provide the ability to configure your SMTP mail host using JVM options with the flexibility of MicroProfile Config. +Instead, we provide the ability to configure your SMTP mail host using JVM options only, with the flexibility of MicroProfile Config. At this point, no action is required if you want to keep your current configuration. Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. + +For more details on how to configure the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section. 
\ No newline at end of file From 930fc1b1ddd34d9b30be2826c011f9047a2e0774 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 20 Feb 2024 16:16:16 +0100 Subject: [PATCH 0718/1112] style(mail): update mail config release note about source of from address --- doc/release-notes/7424-mailsession.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index 67e5684e569..25b1d39a471 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -7,4 +7,6 @@ At this point, no action is required if you want to keep your current configurat Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. -For more details on how to configure the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section. \ No newline at end of file +For more details on how to configure the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section. + +Please note: as there have been problems with mails delivered to SPAM folders when the "From" address in the mail envelope did not match the mail session configuration, as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. \ No newline at end of file From 2e9d4144cffcf97d7890b10f14f438c950f8ab89 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 10:16:46 -0500 Subject: [PATCH 0719/1112] prevent CVV datafieldtypes from getting overwritten in the database --- .../edu/harvard/iq/dataverse/util/json/JsonParser.java | 8 +------- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 5 +++-- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 4287cab069b..16cffb92c8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -741,12 +741,6 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar ret.setDatasetFieldType(type); - // If Harvesting, CVV values may differ between the Dataverse installations, so we won't enforce them - if (allowHarvestingMissingCVV && type.isControlledVocabulary()) { - type.setAllowControlledVocabulary(false); - logger.info("Harvesting: Skipping Controlled Vocabulary. 
Treating values as primitives"); - } - if (type.isCompound()) { List vals = parseCompoundValue(type, json, testType); for (DatasetFieldCompoundValue dsfcv : vals) { @@ -754,7 +748,7 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar } ret.setDatasetFieldCompoundValues(vals); - } else if (type.isControlledVocabulary()) { + } else if (type.isControlledVocabulary() && !allowHarvestingMissingCVV) { // if allowing missing CVV then fall through to 'primitive' List vals = parseControlledVocabularyValue(type, json); for (ControlledVocabularyValue cvv : vals) { cvv.setDatasetFieldType(type); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 375eb92a6ab..1de219e765b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -3,12 +3,14 @@ import java.util.logging.Logger; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import org.junit.jupiter.api.*; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; @@ -27,7 +29,6 @@ * /api/harvest/clients/ api to run an actual harvest of a control set and * then validate the resulting harvested content. */ -@TestMethodOrder(MethodOrderer.MethodName.class) public class HarvestingClientsIT { private static final Logger logger = Logger.getLogger(HarvestingClientsIT.class.getCanonicalName()); From 2018c87acd9c86cd0131c5fcd8228ce4254ec488 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 10:50:36 +0100 Subject: [PATCH 0720/1112] style(ct): rename Maven skip deploy option To make SKIP_DEPLOY and the Maven property more alike, rename the Maven property to be "app.skipDeploy". --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index bf5bf16d423..aaa2b49eaae 100644 --- a/pom.xml +++ b/pom.xml @@ -916,7 +916,7 @@ gdcc/dataverse:${app.image.tag} unstable - false + false gdcc/base:${base.image.tag} unstable gdcc/configbaker:${conf.image.tag} @@ -929,7 +929,7 @@ ${postgresql.server.version} ${solr.version} dataverse - ${app.deploy.skip} + ${app.skipDeploy} From 626b2a87cad6a9e610c7a587bb5960997abb47cb Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 10:54:13 +0100 Subject: [PATCH 0721/1112] doc(ct): rephrase and extend on running container dependencies for hot-reload - Make the description use tabs to be more aligned with the other tabs. 
- Include option as a tab to make IntelliJ run the compose commands for us --- .../source/container/dev-usage.rst | 49 +++++++++++++----- .../img/intellij-compose-add-new-config.png | Bin 0 -> 25929 bytes .../img/intellij-compose-add-run-payara.png | Bin 0 -> 14908 bytes .../container/img/intellij-compose-setup.png | Bin 0 -> 45986 bytes .../img/intellij-compose-sort-run-payara.png | Bin 0 -> 9725 bytes 5 files changed, 37 insertions(+), 12 deletions(-) create mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-add-new-config.png create mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-add-run-payara.png create mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-setup.png create mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-sort-run-payara.png diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 6dbd0276cb3..a8e7efb7edc 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -144,15 +144,13 @@ Alternatives: Redeploying ----------- -Rebuilding and Running Images -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - The safest and most reliable way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``. +Safe, but also slowing down the development cycle a lot. -IDE-Triggered Redeployments -^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Hot Re-Deployments +^^^^^^^^^^^^^^^^^^ -Triggering redeployment using an IDE can greatly improve your feedback look when changing code. +Triggering redeployment of changes using an IDE can greatly improve your feedback loop when changing code. You have at least two options: @@ -237,12 +235,39 @@ To make use of builtin features or Payara tools (option 1), please follow these .. image:: img/intellij-payara-config-server-behaviour.png -#. | Start all the containers. Follow the cheat sheet above, but take care to skip application deployment: - | - When using the Maven commands, append ``-Dapp.deploy.skip``. For example: - | ``mvn -Pct docker:run -Dapp.deploy.skip`` - | - When using Docker Compose, prepend the command with ``SKIP_DEPLOY=1``. For example: - | ``SKIP_DEPLOY=1 docker compose -f docker-compose-dev.yml up`` - | - Note: the Admin Console can be reached at http://localhost:4848 or https://localhost:4949 +#. Start all the containers, but take care to skip application deployment. + + .. tabs:: + .. group-tab:: Maven + ``mvn -Pct docker:run -Dapp.skipDeploy`` + + Run above command in your terminal to start containers in foreground and skip deployment. + See cheat sheet above for more options. + Note that this command either assumes you built the :doc:`app-image` first or will download it from Docker Hub. + .. group-tab:: Compose + ``SKIP_DEPLOY=1 docker compose -f docker-compose-dev.yml up`` + + Run above command in your terminal to start containers in foreground and skip deployment. + See cheat sheet above for more options. + Note that this command either assumes you built the :doc:`app-image` first or will download it from Docker Hub. + .. group-tab:: IntelliJ + You can create a service configuration to automatically start services for you. + + **NOTE**: You might need to change the Docker Compose executable in your IDE settings to ``docker`` if you have no ``docker-compose`` bin. + + .. 
image:: img/intellij-compose-add-new-config.png + + Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variable ``SKIP_DEPLOY=1``, and optionally select the services to start. + + .. image:: img/intellij-compose-setup.png + + Now add this as dependent run configuration in your Payara Run Configuration you created before, in correct order: + + .. image:: img/intellij-compose-add-run-payara.png + .. image:: img/intellij-compose-sort-run-payara.png + + Note: the Admin Console can be reached at http://localhost:4848 or https://localhost:4949 + #. To deploy the application to the running server, use the configured tools to deploy. Using the "Run" configuration only deploys and enables redeploys, while running "Debug" enables hot swapping of classes via JDWP. diff --git a/doc/sphinx-guides/source/container/img/intellij-compose-add-new-config.png b/doc/sphinx-guides/source/container/img/intellij-compose-add-new-config.png new file mode 100644 index 0000000000000000000000000000000000000000..cec9bb357fea359ed18f3595c744cf50cc868845 GIT binary patch literal 25929 zcma&NWl&sEur3P0-6goYySqEV-QC?KxCMec1b26x;O@cQA-FT>o1Amct6O!?xpjZ+ znwmW`dsg@A?$!PE*U>6U(n#=l@L*tINU|~#-@w4Yg+Z4xEHvm2f7qBU=mpMMM#l{d z40-V11)fTej1LAz0wyaVs_t!gzUk$UxwtiQGc#$)9k9UP6Fu*jY6;#zrY*up!v%?; zk5z|-|3yoZ7~vNU*ZGQQNf=!Z28p|vuQJ!VcW zm!03O_uGJ!A}5&QLb(k6D**pe@9dksKE(XDnRYIO5RyM|v}9y*2=Ot}^J6mT)=6Op zohc-+{eZXxRwi)SqzQb9T@1)R{L$%fWmL#My{K_&*buE-7l(4C^-}1qPi2p*~azpbndp3C~WkY?cOOF?52e@NKw=eoff)4#yyBA_&CYN zHj2i~L?XMHUnQ8|=?dt4<7v^oY)8h@&sPTa$GT3z|46w&QU=P;))|qcXXta&!iHQY z%!UsQ3ZAW<#8dR`7n#}+pnXqUfVz+gYTm{twXpiGBhCebY63^9#M(I27S<$ATvF_# zOkqUWO2202w2J;7Ej}tKsUN3j0a{C?+P+;NQW287H8x_YcXL0sHB7&1?79j2#&@zd zh$&*(n`1mTwaawqqTxr$Y48{djxGh+uB`*bAUwO*_}Xl2NYVC_V#^CYKY|EZ^o@W3 z%!I8KaNah4r+|AdpE5VqTMc_lLEDD7MaO|k*$j?o{oF~RAbhG8$gX13bk1{DIPLV> z-_ldm-g95xmTE-V(%UcPP$72<%jY-&*T%%YMnvE3ZR69nV>aIO6*|QV*V?{t2B*AN znCDE56pD(75r~R{KNAWmL{ma&H2g{vqAg6drZL&YRLGC6k;RWgIPUzf=h-+_g@M&Z zIYI+#Ncah+z6?ic*LHoXN(p_E!8z3ZuKjMXaKd{}7qQ6>y}A=wj3Ox+32&VRX$jS$ zB02>(NP*gcR8$-{?`sMkmh)x9P@TFa)BABT~yzWuxtpIt3VJpZtkS>aUSP??pJ_W5OvS6Nyx z)+@s&%z^Jb3Ik86(l_F}gs z(Q>SCDB}X2YZa!eS~``2Ls1-pLSwj1r604n5UBlA@oQ}t8pu$sZQa-W0&=2{VAP9^ zLVte1OE<{TSg&}%TlPew8=z5UMVn^BCEJ!t(Dqw0i)lE-p9Kb!*DSlQ+yF;MhW z)k!h3U#cvJ9Va9;6s@yD;8zgw%1LlCe>z};9(R93El*2VtIrV|6Z`y}T=Kv4sCu)m zVZS3xD=9CbOdTB^_AB+O{`2&(Vl|5=S;QipWH*V^axgtHg<@h0*c9o zC{a|~lHnng-_}2^DPW2!**Y@6zI`k_jb;f~DkcyL`uGykl}Og5Xg&%nid%u*<3S4x zyT1hA(FKCc#G^oSi22(RhCMqvDU2y&W76X?Yc-`*^>fk@b29o)D0UK9)|ZvJ`cPZ( zruA8|{n;<-#UxmZ=HhU%A#2;m46q?8)V3OLwRbe z)4*s>MkY9s9`~dKd9KDq;KqGP^nW)=RJnXv4oa*M)0;Wx_gW2 zhnk1lPnG>=>?Mr631lnY0|G!w#F6_EoB!{KlKtW63W~zdZD>QFv#E|E26Wd@7E@Rh zwfkGRp23@HE&vPk@iIF3zgtN(pdIk0RDFXKy_PF{h6~wAwSBX%<+@-re1;H2^y} zIJoYEK>+SQZFkuH0YVj`bb0W6`UMEBDgns^eeVyVNAisy&yU9@yIG({myOG>dl)8Y zV#W)|rEl&YN-SbaK@uCz*u`!S!X^8h)o}B=9@N|6fHCT|?)AZxvN^n{n*-mNen3M* zs?+#`1--X?4Hcbv13IZ&!;H?%91l>&uJ$@)+bwSwR?;;xEHulCQxxAW7>Qn68tT3E zlap04_=8Eu6Hp0|^|y2=RqM6Idz>wGa4i(OU{J+ns~-voDw4F_A1@jO5;JCRBBXtNTyk+YaHHZB0QPIlIYWcqGu%? 
zZr%gn&l6JBYONnAL<^*EC$pnwwU{YC7NQkG9eJgLIHh7{j+@ROoauNq1`zN*e6Vfz zCGfgJ#X+5XzA!>fws*{kAC&Pr>OUm%|8gAfeZX`#-3QI`RUhZM08(P|qLsq>e*Xt0 CdnSkg literal 0 HcmV?d00001 From 0192758f335c9c493fc1cbe89c42ac2f46594ad3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 18:19:41 +0100 Subject: [PATCH 0722/1112] doc(ct): add default admin credentials to base image docs --- doc/sphinx-guides/source/container/base-image.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst index b43c201fe9f..c41250d48c5 100644 --- a/doc/sphinx-guides/source/container/base-image.rst +++ b/doc/sphinx-guides/source/container/base-image.rst @@ -361,6 +361,8 @@ Other Hints By default, ``domain1`` is enabled to use the ``G1GC`` garbage collector. +To access the Payara Admin Console or use the ``asadmin`` command, use username ``admin`` and password ``admin``. + For running a Java application within a Linux based container, the support for CGroups is essential. It has been included and activated by default since Java 8u192, Java 11 LTS and later. If you are interested in more details, you can read about those in a few places like https://developers.redhat.com/articles/2022/04/19/java-17-whats-new-openjdks-container-awareness, From c1612dbd4a5c057bb590c95679bd92c75b764cee Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 18:20:29 +0100 Subject: [PATCH 0723/1112] style(ct): remove some typos and casing for dev usage docs --- doc/sphinx-guides/source/container/dev-usage.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index a8e7efb7edc..3c2b5934fe8 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -160,14 +160,14 @@ You have at least two options: The main differences between the first and the second options are support for hot deploys of non-class files and limitations in what the JVM HotswapAgent can do for you. Find more details in a `blog article by JRebel `_. -To make use of builtin features or Payara tools (option 1), please follow these steps: +To make use of builtin features or Payara IDE Tools (option 1), please follow these steps: #. | Download the version of Payara shown in :ref:`install-payara-dev` and unzip it to a reasonable location such as ``/usr/local/payara6``. | - Note that Payara can also be downloaded from `Maven Central `_. | - Note that another way to check the expected version of Payara is to run this command: | ``mvn help:evaluate -Dexpression=payara.version -q -DforceStdout`` -#. Install Payara tools plugin in your IDE: +#. Install Payara Tools plugin in your IDE: .. tabs:: .. 
group-tab:: Netbeans From bb8df95b752536af6ea374c8f73322b2f90f4881 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 18:20:46 +0100 Subject: [PATCH 0724/1112] fix(ct): make IntelliJ script less dependent - Remove Perl dependency for version number extraction - Rely on `docker cp` instead of mounting the filesystem --- scripts/intellij/cpwebapp.sh | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/intellij/cpwebapp.sh b/scripts/intellij/cpwebapp.sh index 6ecad367048..a823f8871ce 100755 --- a/scripts/intellij/cpwebapp.sh +++ b/scripts/intellij/cpwebapp.sh @@ -14,20 +14,21 @@ # https://www.jetbrains.com/help/idea/configuring-third-party-tools.html # -PROJECT_DIR=$1 -FILE_TO_COPY=$2 +set -eu + +PROJECT_DIR="$1" +FILE_TO_COPY="$2" RELATIVE_PATH="${FILE_TO_COPY#$PROJECT_DIR/}" # Check if RELATIVE_PATH starts with 'src/main/webapp', otherwise ignore -if [[ $RELATIVE_PATH == src/main/webapp* ]]; then - # Get current version. Any other way to do this? A simple VERSION file would help. - VERSION=`perl -ne 'print $1 if /(.*?)<\/revision>/' ./modules/dataverse-parent/pom.xml` - RELATIVE_PATH_WITHOUT_WEBAPP="${RELATIVE_PATH#src/main/webapp/}" - TARGET_DIR=./docker-dev-volumes/glassfish/applications/dataverse-$VERSION - TARGET_PATH="${TARGET_DIR}/${RELATIVE_PATH_WITHOUT_WEBAPP}" +if [[ "$RELATIVE_PATH" == "src/main/webapp"* ]]; then + # Extract version from POM, so we don't need to have Maven on the PATH + VERSION=$(grep -oPm1 "(?<=)[^<]+" "$PROJECT_DIR/modules/dataverse-parent/pom.xml") - mkdir -p "$(dirname "$TARGET_PATH")" - cp "$FILE_TO_COPY" "$TARGET_PATH" + # Construct the target path by cutting off the local prefix and prepend with in-container path + RELATIVE_PATH_WITHOUT_WEBAPP="${RELATIVE_PATH#src/main/webapp/}" + TARGET_PATH="/opt/payara/appserver/glassfish/domains/domain1/applications/dataverse-$VERSION/${RELATIVE_PATH_WITHOUT_WEBAPP}" - echo "File $FILE_TO_COPY copied to $TARGET_PATH" + # Copy file to container + docker cp "$FILE_TO_COPY" "dev_dataverse:$TARGET_PATH" fi From e9ff6bc2780da61d33226c53e7209528f827e33a Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Feb 2024 18:36:39 +0100 Subject: [PATCH 0725/1112] doc(ct): add notes about IDE redeployment and add stub for non-code redeployment --- .../source/container/dev-usage.rst | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 3c2b5934fe8..aef262f30cf 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -147,11 +147,7 @@ Redeploying The safest and most reliable way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``. Safe, but also slowing down the development cycle a lot. -Hot Re-Deployments -^^^^^^^^^^^^^^^^^^ - Triggering redeployment of changes using an IDE can greatly improve your feedback loop when changing code. - You have at least two options: #. Use builtin features of IDEs or `IDE plugins from Payara `_. @@ -160,7 +156,13 @@ You have at least two options: The main differences between the first and the second options are support for hot deploys of non-class files and limitations in what the JVM HotswapAgent can do for you. Find more details in a `blog article by JRebel `_. 
-To make use of builtin features or Payara IDE Tools (option 1), please follow these steps: +IDE Triggered Code Re-Deployments +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To make use of builtin features or Payara IDE Tools (option 1), please follow steps below. +Note that using this method, you may redeploy a complete WAR or single methods. +Redeploying WARs supports swapping and adding classes and non-code materials, but is slower (still faster than rebuilding containers). +Hotswapping methods requires using JDWP (Debug Mode), but does not allow switching non-code material or adding classes. #. | Download the version of Payara shown in :ref:`install-payara-dev` and unzip it to a reasonable location such as ``/usr/local/payara6``. | - Note that Payara can also be downloaded from `Maven Central `_. @@ -312,6 +314,17 @@ To make use of builtin features or Payara IDE Tools (option 1), please follow th Note: in the background, the bootstrap job will wait for Dataverse to be deployed and responsive. When your IDE automatically opens the URL a newly deployed, not bootstrapped Dataverse application, it might take some more time and page refreshes until the job finishes. +IDE Triggered Non-Code Re-Deployments +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Either redeploy the WAR (see above), use JRebel or look into copying files into the exploded WAR within the running container. +The steps below describe options to enable the later in different IDEs. + +.. tabs:: + .. group-tab:: IntelliJ + TODO + + Using a Debugger ---------------- From 314e2ebf5f678a8cadfb925e0b1ea5c194019b8a Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 21 Feb 2024 16:18:55 -0500 Subject: [PATCH 0726/1112] #10286 changes from CR --- .../harvard/iq/dataverse/api/Datasets.java | 15 ++-- .../harvard/iq/dataverse/api/Dataverses.java | 2 +- .../edu/harvard/iq/dataverse/api/Files.java | 10 +-- .../iq/dataverse/util/json/JsonPrinter.java | 89 +++++++++++-------- .../harvard/iq/dataverse/api/DatasetsIT.java | 10 +-- .../iq/dataverse/api/DataversesIT.java | 2 +- .../edu/harvard/iq/dataverse/api/FilesIT.java | 2 +- 7 files changed, 70 insertions(+), 60 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d19c8bf3915..7d0141641fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -186,12 +186,11 @@ public interface DsVersionHandler { @GET @AuthRequired @Path("{id}") - public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) { + public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) { return response( req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved)); - Boolean includeOwners = returnOwners == null ? 
false : returnOwners; - final JsonObjectBuilder jsonbuilder = json(retrieved, includeOwners); + final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) if((latest != null) && latest.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); @@ -422,6 +421,7 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("versionId") String versionId, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @QueryParam("returnOwners") boolean includeOwners, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response( req -> { @@ -440,7 +440,8 @@ public Response getVersion(@Context ContainerRequestContext crc, if (excludeFiles == null ? true : !excludeFiles) { dsv = datasetversionService.findDeep(dsv.getId()); } - return ok(json(dsv, excludeFiles == null ? true : !excludeFiles)); + System.out.print("returnOwners: " + includeOwners); + return ok(json(dsv, null, excludeFiles == null ? true : !excludeFiles, includeOwners)); }, getRequestUser(crc)); } @@ -4387,7 +4388,7 @@ public Response getDatasetSummaryFieldNames() { @GET @Path("privateUrlDatasetVersion/{privateUrlToken}") - public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken) { + public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) { PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken); if (privateUrlUser == null) { return notFound("Private URL user not found"); @@ -4404,9 +4405,9 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String JsonObjectBuilder responseJson; if (isAnonymizedAccess) { List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); - responseJson = json(dsv, anonymizedFieldTypeNamesList, true); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners); } else { - responseJson = json(dsv, true); + responseJson = json(dsv, null, true, returnOwners); } return ok(responseJson); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 66aec38adfa..3bcbfdd4d58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -610,7 +610,7 @@ private Dataset parseDataset(String datasetJson) throws WrappedResponse { @GET @AuthRequired @Path("{identifier}") - public Response viewDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf, @QueryParam("returnOwners") Boolean returnOwners) { + public Response getDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf, @QueryParam("returnOwners") Boolean returnOwners) { Boolean includeOwners = returnOwners == null ? 
false : returnOwners; return response(req -> ok( json(execCommand(new GetDataverseCommand(req, findDataverseOrDie(idtf))), diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 13e459bc3e8..6efb4766dfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -506,17 +506,15 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa @GET @AuthRequired @Path("{id}/draft") - public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) throws WrappedResponse, Exception { - Boolean includeOwners = returnOwners == null ? false : returnOwners; - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true, includeOwners); + public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) throws WrappedResponse, Exception { + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true, returnOwners); } @GET @AuthRequired @Path("{id}") - public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) throws WrappedResponse, Exception { - Boolean includeOwners = returnOwners == null ? 
false : returnOwners; - return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false, includeOwners); + public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) throws WrappedResponse, Exception { + return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false, returnOwners); } private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft, boolean includeOwners ){ diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index d64f77b3526..05dbc4d6079 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -272,7 +272,7 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean in bld.add("dataverseContacts", JsonPrinter.json(dv.getDataverseContacts())); } if (includeOwners){ - bld.add("ownerArray", getOwnersFromDvObject(dv)); + bld.add("isPartOf", getOwnersFromDvObject(dv)); } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) @@ -307,46 +307,58 @@ public static JsonArrayBuilder json(List dataverseContacts) { return jsonArrayOfContacts; } - public static JsonArrayBuilder getOwnersFromDvObject(DvObject dvObject) { - + public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject){ + return getOwnersFromDvObject(dvObject, null); + } + + public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject, DatasetVersion dsv) { List ownerList = new ArrayList(); dvObject = dvObject.getOwner(); // We're going to ignore the object itself + //Get "root" to top of list while (dvObject != null) { - ownerList.add(dvObject); + ownerList.add(0, dvObject); dvObject = dvObject.getOwner(); } + //then work "inside out" + JsonObjectBuilder saved = null; + for (DvObject dvo : ownerList) { + saved = addEmbeddedOwnerObject(dvo, saved, dsv); + } + return saved; + } + + private static JsonObjectBuilder addEmbeddedOwnerObject(DvObject dvo, JsonObjectBuilder isPartOf, DatasetVersion dsv ) { + JsonObjectBuilder ownerObject = jsonObjectBuilder(); + + if (dvo.isInstanceofDataverse()) { + ownerObject.add("type", "DATAVERSE"); + Dataverse in = (Dataverse) dvo; + ownerObject.add("identifier", in.getAlias()); + } + + if (dvo.isInstanceofDataset()) { + ownerObject.add("type", "DATASET"); + String versionString = ""; + if (dsv != null){ + versionString = dsv == null ? 
"" : "&version=" + dsv.getFriendlyVersionNumber(); + } + if (dvo.getGlobalId() != null) { + ownerObject.add("identifier", dvo.getGlobalId().asString() + versionString); + } else { + ownerObject.add("identifier", dvo.getId() ); + } + + } - JsonArrayBuilder jsonArrayOfOwners = Json.createArrayBuilder(); + ownerObject.add("displayName", dvo.getDisplayName()); - for (DvObject dvo : ownerList){ - JsonObjectBuilder ownerObject = jsonObjectBuilder(); - if (dvo.isInstanceofDataverse()){ - ownerObject.add("type", "DATAVERSE"); - } - if (dvo.isInstanceofDataset()){ - ownerObject.add("type", "DATASET"); - } - if (dvo.isInstanceofDataFile()){ - ownerObject.add("type", "DATAFILE"); - } - if (dvo.isInstanceofDataverse()){ - Dataverse in = (Dataverse) dvo; - ownerObject.add("identifier", in.getAlias()); - } - if (dvo.isInstanceofDataset() || dvo.isInstanceofDataFile() ){ - if (dvo.getIdentifier() != null){ - Dataset ds = (Dataset) dvo; - ownerObject.add("identifier", ds.getGlobalId().asString()); - } else { - ownerObject.add("identifier", dvo.getId()); - } - } - ownerObject.add("displayName", dvo.getDisplayName()); - jsonArrayOfOwners.add(ownerObject); + if (isPartOf != null) { + ownerObject.add("isPartOf", isPartOf); } - return jsonArrayOfOwners; + + return ownerObject; } - + public static JsonObjectBuilder json( DataverseTheme theme ) { final NullSafeJsonBuilder baseObject = jsonObjectBuilder() .add("id", theme.getId() ) @@ -388,7 +400,7 @@ public static JsonObjectBuilder json(Dataset ds, Boolean includeOwners) { bld.add("metadataLanguage", ds.getMetadataLanguage()); } if (includeOwners){ - bld.add("ownerArray", getOwnersFromDvObject(ds)); + bld.add("isPartOf", getOwnersFromDvObject(ds)); } return bld; } @@ -402,10 +414,10 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { } public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { - return json(dsv, null, includeFiles); + return json(dsv, null, includeFiles, false); } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles) { + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, boolean includeOwners) { /* return json(dsv, null, includeFiles, null); } public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, Long numberOfFiles) {*/ @@ -452,7 +464,10 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ? 
jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) : jsonByBlocks(dsv.getDatasetFields()) - ); + ); + if(includeOwners){ + bld.add("isPartOf", getOwnersFromDvObject(dataset)); + } if (includeFiles) { bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas())); } @@ -762,7 +777,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo : null); } if (includeOwners){ - builder.add("ownerArray", getOwnersFromDvObject(df)); + builder.add("isPartOf", getOwnersFromDvObject(df, fileMetadata.getDatasetVersion())); } return builder; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index f4e70e03d45..51fe52b5866 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1889,7 +1889,7 @@ public void testDeleteDatasetWhileFileIngesting() { } @Test - public void testGetIncludeOwnerArray() { + public void testGetDatasetOwners() { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat() @@ -1913,7 +1913,7 @@ public void testGetIncludeOwnerArray() { Response getDatasetWithOwners = UtilIT.getDatasetWithOwners(persistentId, apiToken, true); getDatasetWithOwners.prettyPrint(); - getDatasetWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(dataverseAlias)); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(dataverseAlias)); Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); @@ -1922,12 +1922,8 @@ public void testGetIncludeOwnerArray() { assertEquals(200, deleteDataverseResponse.getStatusCode()); Response deleteUserResponse = UtilIT.deleteUser(username); - assertEquals(200, deleteUserResponse.getStatusCode()); - + assertEquals(200, deleteUserResponse.getStatusCode()); } - - - /** * In order for this test to pass you must have the Data Capture Module ( diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index e41793a10d5..3330d11435a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -170,7 +170,7 @@ public void testGetDataverseOwners() throws FileNotFoundException { Response getWithOwners = UtilIT.getDataverseWithOwners(level1a, apiToken, true); getWithOwners.prettyPrint(); - getWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(first)); + getWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(first)); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index d69a3ac885c..fd72f22a140 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1504,7 +1504,7 @@ public void testGetFileOwners() { .body("data.dataFile.filesize", equalTo(8361)) .statusCode(OK.getStatusCode()); - getFileDataResponse.then().assertThat().body("data.dataFile.ownerArray[0].identifier", equalTo(datasetPid)); + getFileDataResponse.then().assertThat().body("data.dataFile.isPartOf.identifier", equalTo(datasetPid)); // ------------------------- // Publish dataverse and dataset From c9cccaac56121bdfcc8f4bc2038fdf5de0b04794 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 22 Feb 2024 07:38:45 
+0100 Subject: [PATCH 0727/1112] docs(ct): remove configuration trigger step in IntelliJ Depending on the attachment configuration, the run configuration waits in blocking mode for more output from the container logs. The application would never be deployed, as the wait is indefinite. We need to run the compose step and the deploy step on their own. One appears in the services tab, the other in the run tab. --- .../source/container/dev-usage.rst | 6 +++--- .../img/intellij-compose-add-run-payara.png | Bin 14908 -> 0 bytes .../container/img/intellij-compose-run.png | Bin 0 -> 3080 bytes .../img/intellij-compose-sort-run-payara.png | Bin 9725 -> 0 bytes 4 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-add-run-payara.png create mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-run.png delete mode 100644 doc/sphinx-guides/source/container/img/intellij-compose-sort-run-payara.png diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index aef262f30cf..d37b9f4763f 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -260,13 +260,13 @@ Hotswapping methods requires using JDWP (Debug Mode), but does not allow switchi .. image:: img/intellij-compose-add-new-config.png Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variable ``SKIP_DEPLOY=1``, and optionally select the services to start. + You might also want to change other options like attaching to containers to view the logs within the "Services" tab. .. image:: img/intellij-compose-setup.png - Now add this as dependent run configuration in your Payara Run Configuration you created before, in correct order: + Now run the configuration to prepare for deployment. - .. image:: img/intellij-compose-add-run-payara.png - .. image:: img/intellij-compose-sort-run-payara.png + .. 
image:: img/intellij-compose-run.png

     Note: the Admin Console can be reached at http://localhost:4848 or https://localhost:4949

diff --git a/doc/sphinx-guides/source/container/img/intellij-compose-add-run-payara.png b/doc/sphinx-guides/source/container/img/intellij-compose-add-run-payara.png
deleted file mode 100644
index 52a301f7ed58ff6f05ccd9ad99f5293d0e4143e1..0000000000000000000000000000000000000000
GIT binary patch
[base85-encoded binary data for the deleted intellij-compose-add-run-payara.png and intellij-compose-sort-run-payara.png screenshots and the new intellij-compose-run.png screenshot omitted]

From f3611a012773033e020c83230e3124df6351e195 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Thu, 22 Feb 2024 07:40:29 +0100
Subject: [PATCH 0728/1112] chore(ct): move IntelliJ scripts to Docker folder

This is very much Docker specific now and should be located within that folder.
--- {scripts => docker/util}/intellij/cpwebapp.sh | 0 {scripts => docker/util}/intellij/watchers.xml | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {scripts => docker/util}/intellij/cpwebapp.sh (100%) rename {scripts => docker/util}/intellij/watchers.xml (100%) diff --git a/scripts/intellij/cpwebapp.sh b/docker/util/intellij/cpwebapp.sh similarity index 100% rename from scripts/intellij/cpwebapp.sh rename to docker/util/intellij/cpwebapp.sh diff --git a/scripts/intellij/watchers.xml b/docker/util/intellij/watchers.xml similarity index 100% rename from scripts/intellij/watchers.xml rename to docker/util/intellij/watchers.xml From 67ee8b7835bcdc0907083389bfa9d2fe1256680e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 22 Feb 2024 07:55:38 +0100 Subject: [PATCH 0729/1112] docs(ct): add README for IntelliJ auto-copy save trigger --- docker/util/intellij/README.md | 13 +++++++++++++ docker/util/intellij/cpwebapp.sh | 11 ----------- 2 files changed, 13 insertions(+), 11 deletions(-) create mode 100644 docker/util/intellij/README.md diff --git a/docker/util/intellij/README.md b/docker/util/intellij/README.md new file mode 100644 index 00000000000..281d0e50ea6 --- /dev/null +++ b/docker/util/intellij/README.md @@ -0,0 +1,13 @@ +# IntelliJ Auto-Copy of Webapp Files + +When deploying the webapp via Payara Tools, you can use this tool to immediately copy changes to non-code files into the running deployment, instantly seeing changes in your browser. + +Note: as this relies on using a Bash shell script, it is pretty much limited to Mac and Linux. +Feel free to extend and provide a PowerShell equivalent! + +1. Install the [File Watcher plugin](https://plugins.jetbrains.com/plugin/7177-file-watchers) +2. Import the [watchers.xml](./watchers.xml) file at *File > Settings > Tools > File Watchers* +3. Once you have the deployment running (see Container Guides), editing files at `src/main/webapp` will be copied into the deployment after saving the edited file. + +Alternatively, you can add an External tool and trigger via menu or shortcut to do the copying manually: +https://www.jetbrains.com/help/idea/configuring-third-party-tools.html diff --git a/docker/util/intellij/cpwebapp.sh b/docker/util/intellij/cpwebapp.sh index a823f8871ce..0a59463f5aa 100755 --- a/docker/util/intellij/cpwebapp.sh +++ b/docker/util/intellij/cpwebapp.sh @@ -2,17 +2,6 @@ # # cpwebapp # -# Usage: -# -# Add a File watcher by importing watchers.xml into IntelliJ IDEA, and let it do the copying whenever you save a -# file under webapp. -# -# https://www.jetbrains.com/help/idea/settings-tools-file-watchers.html -# -# Alternatively, you can add an External tool and trigger via menu or shortcut to do the copying manually: -# -# https://www.jetbrains.com/help/idea/configuring-third-party-tools.html -# set -eu From fa61267a1de69527a504e846f5e97d7713bd166d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 22 Feb 2024 07:56:22 +0100 Subject: [PATCH 0730/1112] fix(ct): properly quote var in expression Thank you, shellcheck! 
--- docker/util/intellij/cpwebapp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/util/intellij/cpwebapp.sh b/docker/util/intellij/cpwebapp.sh index 0a59463f5aa..2d08fb1a873 100755 --- a/docker/util/intellij/cpwebapp.sh +++ b/docker/util/intellij/cpwebapp.sh @@ -7,7 +7,7 @@ set -eu PROJECT_DIR="$1" FILE_TO_COPY="$2" -RELATIVE_PATH="${FILE_TO_COPY#$PROJECT_DIR/}" +RELATIVE_PATH="${FILE_TO_COPY#"${PROJECT_DIR}/"}" # Check if RELATIVE_PATH starts with 'src/main/webapp', otherwise ignore if [[ "$RELATIVE_PATH" == "src/main/webapp"* ]]; then From d20785a97b392ce493dec84357973412d070d0b7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 22 Feb 2024 07:56:46 +0100 Subject: [PATCH 0731/1112] fix(ct): correct path in watchers.xml to shell script location --- docker/util/intellij/watchers.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/util/intellij/watchers.xml b/docker/util/intellij/watchers.xml index e118fea558f..4ccee125ec2 100644 --- a/docker/util/intellij/watchers.xml +++ b/docker/util/intellij/watchers.xml @@ -12,7 +12,7 @@

[GIT binary patch data omitted]

diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst
index d7b7f281634..015ba43644d 100755
--- a/doc/sphinx-guides/source/developers/classic-dev-env.rst
+++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst
@@ -88,6 +88,8 @@ On Mac, run this command:
 On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/
+..
_install-payara-dev: + Install Payara ~~~~~~~~~~~~~~ From efbdb72d8d8d985dc1efa4141bd14041e397c242 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Feb 2024 22:18:01 -0500 Subject: [PATCH 0666/1112] US English spelling for consistency #9590 --- doc/sphinx-guides/source/container/dev-usage.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 85b1b3e5f05..3e7dd374036 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -231,7 +231,7 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/intellij-payara-config-startup.png - You might want to tweak the hot deploy behaviour in the "Server" tab now. + You might want to tweak the hot deploy behavior in the "Server" tab now. "Update action" can be found in the run window (see below). "Frame deactivation" means switching from IntelliJ window to something else, e.g. your browser. *Note: static resources like properties, XHTML etc will only update when redeploying!* @@ -271,7 +271,7 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/intellij-payara-run-toolbar.png Watch the WAR build and the deployment unfold. - Note the "Update" action button (see config to change its behaviour). + Note the "Update" action button (see config to change its behavior). .. image:: img/intellij-payara-run-output.png From 6666857ef8d425bb46fd83569f6b3f1b1d620dc2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Feb 2024 22:22:12 -0500 Subject: [PATCH 0667/1112] switch away from hard-coded numbers in lists #9590 --- doc/sphinx-guides/source/container/dev-usage.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 3e7dd374036..d2bf820d89a 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -154,21 +154,21 @@ IDE-triggered re-deployments You have at least two options: -1. Use builtin features of IDEs or plugins for different IDEs by Payara to ease the burden of redeploying an application during development to a running Payara application server. +#. Use builtin features of IDEs or plugins for different IDEs by Payara to ease the burden of redeploying an application during development to a running Payara application server. Their guides contain `documentation on Payara IDE plugins `_. -2. Use a paid product like `JRebel `_. +#. Use a paid product like `JRebel `_. The main difference between the first and the second option is support for hot deploys of non-class files plus limitations in what the JVM HotswapAgent can do for you. Find more `details in a blog article by JRebel `_. When opting for builtin features or Payara tools, please follow these steps: -1. | Download the Payara appserver to your machine, unzip and note the location for later. +#. | Download the Payara appserver to your machine, unzip and note the location for later. | - See :ref:`payara` for which version or run the following command | ``mvn help:evaluate -Dexpression=payara.version -q -DforceStdout`` | - To download, see :ref:`payara` or try `Maven Central `_. -2. Install Payara tools plugin in your IDE: +#. Install Payara tools plugin in your IDE: .. tabs:: .. 
group-tab:: Netbeans @@ -182,7 +182,7 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/intellij-payara-plugin-install.png -3. Configure a connection to the application server: +#. Configure a connection to the application server: .. tabs:: .. group-tab:: Netbeans @@ -238,13 +238,13 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/intellij-payara-config-server-behaviour.png -4. | Start all the containers. Follow the cheat sheet above, but take care to skip application deployment: +#. | Start all the containers. Follow the cheat sheet above, but take care to skip application deployment: | - When using the Maven commands, append ``-Dapp.deploy.skip``. For example: | ``mvn -Pct docker:run -Dapp.deploy.skip`` | - When using Docker Compose, prepend the command with ``SKIP_DEPLOY=1``. For example: | ``SKIP_DEPLOY=1 docker compose -f docker-compose-dev.yml up`` | - Note: the Admin Console can be reached at http://localhost:4848 or https://localhost:4949 -5. To deploy the application to the running server, use the configured tools to deploy. +#. To deploy the application to the running server, use the configured tools to deploy. Using the "Run" configuration only deploys and enables redeploys, while running "Debug" enables hot swapping of classes via JDWP. .. tabs:: From 0c0067e5e21775c6ea8a8635cb16599493708930 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Feb 2024 22:56:16 -0500 Subject: [PATCH 0668/1112] various doc tweaks #9590 --- .../source/container/dev-usage.rst | 45 ++++++++++--------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index d2bf820d89a..6dbd0276cb3 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -144,29 +144,30 @@ Alternatives: Redeploying ----------- -Rebuild and Running Images -^^^^^^^^^^^^^^^^^^^^^^^^^^ +Rebuilding and Running Images +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The safest way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``. +The safest and most reliable way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``. -IDE-triggered re-deployments -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +IDE-Triggered Redeployments +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Triggering redeployment using an IDE can greatly improve your feedback look when changing code. You have at least two options: -#. Use builtin features of IDEs or plugins for different IDEs by Payara to ease the burden of redeploying an application during development to a running Payara application server. - Their guides contain `documentation on Payara IDE plugins `_. +#. Use builtin features of IDEs or `IDE plugins from Payara `_. #. Use a paid product like `JRebel `_. -The main difference between the first and the second option is support for hot deploys of non-class files plus limitations in what the JVM HotswapAgent can do for you. -Find more `details in a blog article by JRebel `_. +The main differences between the first and the second options are support for hot deploys of non-class files and limitations in what the JVM HotswapAgent can do for you. 
+Find more details in a `blog article by JRebel `_. -When opting for builtin features or Payara tools, please follow these steps: +To make use of builtin features or Payara tools (option 1), please follow these steps: -#. | Download the Payara appserver to your machine, unzip and note the location for later. - | - See :ref:`payara` for which version or run the following command - | ``mvn help:evaluate -Dexpression=payara.version -q -DforceStdout`` - | - To download, see :ref:`payara` or try `Maven Central `_. +#. | Download the version of Payara shown in :ref:`install-payara-dev` and unzip it to a reasonable location such as ``/usr/local/payara6``. + | - Note that Payara can also be downloaded from `Maven Central `_. + | - Note that another way to check the expected version of Payara is to run this command: + | ``mvn help:evaluate -Dexpression=payara.version -q -DforceStdout`` #. Install Payara tools plugin in your IDE: @@ -182,16 +183,14 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/intellij-payara-plugin-install.png -#. Configure a connection to the application server: +#. Configure a connection to Payara: .. tabs:: .. group-tab:: Netbeans - Unzip Payara to ``/usr/local/payara6`` as explained in :ref:`install-payara-dev`. - - Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6``. Use the settings in the screenshot below. Most of the defaults are fine. + Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6`` (or wherever you unzipped Payara). Choose "Remote Domain". Use the settings in the screenshot below. Most of the defaults are fine. - Under "Common", the password should be "admin". Make sure "Enable Hot Deploy" is checked. + Under "Common", the username and password should be "admin". Make sure "Enable Hot Deploy" is checked. .. image:: img/netbeans-servers-common.png @@ -203,7 +202,7 @@ When opting for builtin features or Payara tools, please follow these steps: .. image:: img/netbeans-compile.png - Under "Run", select "Payara Server" under "Server" and make sure "Deploy on Save" is checked. + Under "Run", under "Server", select "Payara Server". Make sure "Deploy on Save" is checked. .. image:: img/netbeans-run.png @@ -265,6 +264,12 @@ When opting for builtin features or Payara tools, please follow these steps: Check to make sure the change is live by visiting, for example, http://localhost:8080/api/info/version + See below for a `video `_ demonstrating the steps above but please note that the ports used have changed and now that we have the concept of "skip deploy" the undeployment step shown is no longer necessary. + + .. raw:: html + + + .. group-tab:: IntelliJ Choose "Run" or "Debug" in the toolbar. 
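Taken together, the dev-usage changes above describe a single workflow: check the expected Payara version, start the containers without deploying the application, deploy from the IDE, and verify the result. Condensed into one command sequence (the commands are quoted from the diff above; the final curl call is merely one way to check the version endpoint the guide points to, not something the patch itself adds):

# Show the Payara version the build expects (useful when downloading Payara for the IDE plugin):
mvn help:evaluate -Dexpression=payara.version -q -DforceStdout

# Start the containers but skip application deployment so the IDE can deploy instead:
mvn -Pct docker:run -Dapp.deploy.skip
# ...or, with Docker Compose:
SKIP_DEPLOY=1 docker compose -f docker-compose-dev.yml up

# After running "Run"/"Debug" from Netbeans or IntelliJ, confirm the new code is live:
curl http://localhost:8080/api/info/version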
From b2d5ea8381e15cc861e04b7a33a20a10b20762e0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 13 Feb 2024 23:03:05 -0500 Subject: [PATCH 0669/1112] add release note #9590 --- doc/release-notes/9590-faster-redeploy.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9590-faster-redeploy.md diff --git a/doc/release-notes/9590-faster-redeploy.md b/doc/release-notes/9590-faster-redeploy.md new file mode 100644 index 00000000000..caaa688bf58 --- /dev/null +++ b/doc/release-notes/9590-faster-redeploy.md @@ -0,0 +1,3 @@ +In the Container Guide, documentation for developers on how to quickly redeploy code has been improved for IntelliJ and Netbeans is now covered. + +Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. From fc8aac32fa7f1b5d53cdc8f4ca1127b5493f2885 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 11:50:29 +0000 Subject: [PATCH 0670/1112] Fixed: GetLatestAccessibleFileMetadataCommand --- .../GetLatestAccessibleFileMetadataCommand.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java index 980563a5489..a2022adbc27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -19,16 +20,20 @@ public GetLatestAccessibleFileMetadataCommand(DataverseRequest aRequest, DataFil @Override public FileMetadata execute(CommandContext ctxt) throws CommandException { - FileMetadata fileMetadata = ctxt.engine().submit( - new GetLatestPublishedFileMetadataCommand(getRequest(), dataFile) - ); + FileMetadata fileMetadata = null; - if (fileMetadata == null) { + if (ctxt.permissions().requestOn(getRequest(), dataFile.getOwner()).has(Permission.ViewUnpublishedDataset)) { fileMetadata = ctxt.engine().submit( new GetDraftFileMetadataIfAvailableCommand(getRequest(), dataFile) ); } + if (fileMetadata == null) { + fileMetadata = ctxt.engine().submit( + new GetLatestPublishedFileMetadataCommand(getRequest(), dataFile) + ); + } + return fileMetadata; } } From 153b9ae0f04283386c6f7a70920285aece25a990 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 12:00:23 +0000 Subject: [PATCH 0671/1112] Added: FilesIT testGetFileInfo test cases --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 45 ++++++++++++++++--- 1 file changed, 39 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index d84b0ed77ac..9af457c35c4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -11,8 +11,7 @@ import org.junit.jupiter.api.BeforeAll; import 
io.restassured.path.json.JsonPath; -import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; -import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.path.json.JsonPath.with; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1450,9 +1449,9 @@ public void testGetFileInfo() { .statusCode(NOT_FOUND.getStatusCode()); // Update the file metadata - String newFileName = "trees_2.png"; + String newFileNameFirstUpdate = "trees_2.png"; JsonObjectBuilder updateFileMetadata = Json.createObjectBuilder() - .add("label", newFileName); + .add("label", newFileNameFirstUpdate); Response updateFileMetadataResponse = UtilIT.updateFileMetadata(dataFileId, updateFileMetadata.build().toString(), superUserApiToken); updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); @@ -1471,12 +1470,46 @@ public void testGetFileInfo() { publishDatasetResp.then().assertThat() .statusCode(OK.getStatusCode()); + // Update the file metadata once again + String newFileNameSecondUpdate = "trees_3.png"; + updateFileMetadata = Json.createObjectBuilder() + .add("label", newFileNameSecondUpdate); + updateFileMetadataResponse = UtilIT.updateFileMetadata(dataFileId, updateFileMetadata.build().toString(), superUserApiToken); + updateFileMetadataResponse.then().statusCode(OK.getStatusCode()); + // Regular user should get to see latest published file data getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_LATEST_PUBLISHED); getFileDataResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.label", equalTo(newFileName)); - // TODO + .body("data.label", equalTo(newFileNameFirstUpdate)); + + // Regular user should get to see latest published file data if latest is requested + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, DS_VERSION_LATEST); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileNameFirstUpdate)); + + // Superuser should get to see draft file data if latest is requested + getFileDataResponse = UtilIT.getFileData(dataFileId, superUserApiToken, DS_VERSION_LATEST); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileNameSecondUpdate)); + + // Publish dataset once again + publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", superUserApiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Regular user should get to see file data by specific version number + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, "2.0"); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileNameFirstUpdate)); + + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular, "3.0"); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo(newFileNameSecondUpdate)); // Cleanup Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, superUserApiToken); From 2a30f328b91faade4a0827a826d51095a1788053 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 14:05:51 +0000 Subject: [PATCH 0672/1112] Stash: includeDeaccessioned support on get file info endpoint wip --- ...etLatestAccessibleFileMetadataCommand.java | 6 ++-- ...GetLatestPublishedFileMetadataCommand.java | 
4 ++- ...edFileMetadataByDatasetVersionCommand.java | 33 +++++++++++-------- 3 files changed, 26 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java index a2022adbc27..fa80b75c593 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleFileMetadataCommand.java @@ -12,10 +12,12 @@ @RequiredPermissions({}) public class GetLatestAccessibleFileMetadataCommand extends AbstractCommand { private final DataFile dataFile; + private final boolean includeDeaccessioned; - public GetLatestAccessibleFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile) { + public GetLatestAccessibleFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile, boolean includeDeaccessioned) { super(aRequest, dataFile); this.dataFile = dataFile; + this.includeDeaccessioned = includeDeaccessioned; } @Override @@ -30,7 +32,7 @@ public FileMetadata execute(CommandContext ctxt) throws CommandException { if (fileMetadata == null) { fileMetadata = ctxt.engine().submit( - new GetLatestPublishedFileMetadataCommand(getRequest(), dataFile) + new GetLatestPublishedFileMetadataCommand(getRequest(), dataFile, includeDeaccessioned) ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java index 147a0fdce76..4056d145917 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedFileMetadataCommand.java @@ -11,10 +11,12 @@ @RequiredPermissions({}) public class GetLatestPublishedFileMetadataCommand extends AbstractCommand { private final DataFile dataFile; + private final boolean includeDeaccessioned; - public GetLatestPublishedFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile) { + public GetLatestPublishedFileMetadataCommand(DataverseRequest aRequest, DataFile dataFile, boolean includeDeaccessioned) { super(aRequest, dataFile); this.dataFile = dataFile; + this.includeDeaccessioned = includeDeaccessioned; } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java index 84a51f6b31d..82350d3bd95 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedFileMetadataByDatasetVersionCommand.java @@ -1,43 +1,48 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import java.util.List; - @RequiredPermissions({}) public class GetSpecificPublishedFileMetadataByDatasetVersionCommand extends AbstractCommand { private final long majorVersion; private final long minorVersion; private final DataFile dataFile; + private final boolean includeDeaccessioned; - public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersion, long minorVersion) { + public GetSpecificPublishedFileMetadataByDatasetVersionCommand(DataverseRequest aRequest, DataFile dataFile, long majorVersion, long minorVersion, boolean includeDeaccessioned) { super(aRequest, dataFile); this.dataFile = dataFile; this.majorVersion = majorVersion; this.minorVersion = minorVersion; + this.includeDeaccessioned = includeDeaccessioned; } @Override public FileMetadata execute(CommandContext ctxt) throws CommandException { - List fileMetadatas = dataFile.getFileMetadatas(); - - for (FileMetadata fileMetadata : fileMetadatas) { - DatasetVersion datasetVersion = fileMetadata.getDatasetVersion(); + return dataFile.getFileMetadatas().stream() + .filter(fileMetadata -> isRequestedVersionFileMetadata(fileMetadata, ctxt)) + .findFirst() + .orElse(null); + } - if (datasetVersion.isPublished() && - datasetVersion.getVersionNumber().equals(majorVersion) && - datasetVersion.getMinorVersionNumber().equals(minorVersion)) { - return fileMetadata; - } - } + private boolean isRequestedVersionFileMetadata(FileMetadata fileMetadata, CommandContext ctxt) { + DatasetVersion datasetVersion = fileMetadata.getDatasetVersion(); + Dataset ownerDataset = dataFile.getOwner(); + return (datasetVersion.isReleased() || isDatasetVersionDeaccessionedAndAccessible(datasetVersion, ownerDataset, ctxt)) + && datasetVersion.getVersionNumber().equals(majorVersion) + && datasetVersion.getMinorVersionNumber().equals(minorVersion); + } - return null; + private boolean isDatasetVersionDeaccessionedAndAccessible(DatasetVersion datasetVersion, Dataset ownerDataset, CommandContext ctxt) { + return includeDeaccessioned && datasetVersion.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ownerDataset).has(Permission.EditDataset); } } From b5aeb258d025036c2da808d775e8d5bbf15add38 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 14:06:20 +0000 Subject: [PATCH 0673/1112] Stash: includeDeaccessioned support on get file info endpoint wip (2) --- .../edu/harvard/iq/dataverse/api/Files.java | 30 ++++++++++++++----- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 4116bf18973..55d65bae96b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -486,23 +486,37 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa @GET @AuthRequired @Path("{id}") - public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> getFileDataResponse(req, fileIdOrPersistentId, uriInfo, headers, DS_VERSION_LATEST), getRequestUser(crc)); + public Response getFileData(@Context ContainerRequestContext crc, + @PathParam("id") String fileIdOrPersistentId, + @QueryParam("includeDeaccessioned") boolean 
includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, DS_VERSION_LATEST, includeDeaccessioned, uriInfo, headers), getRequestUser(crc)); } @GET @AuthRequired @Path("{id}/versions/{datasetVersionId}") - public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("datasetVersionId") String datasetVersionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> getFileDataResponse(req, fileIdOrPersistentId, uriInfo, headers, datasetVersionId), getRequestUser(crc)); + public Response getFileData(@Context ContainerRequestContext crc, + @PathParam("id") String fileIdOrPersistentId, + @PathParam("datasetVersionId") String datasetVersionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response( req -> getFileDataResponse(req, fileIdOrPersistentId, datasetVersionId, includeDeaccessioned, uriInfo, headers), getRequestUser(crc)); } - private Response getFileDataResponse(final DataverseRequest req, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, String datasetVersionId) throws WrappedResponse { + private Response getFileDataResponse(final DataverseRequest req, + String fileIdOrPersistentId, + String datasetVersionId, + boolean includeDeaccessioned, + UriInfo uriInfo, + HttpHeaders headers) throws WrappedResponse { final DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); FileMetadata fileMetadata = execCommand(handleVersion(datasetVersionId, new Datasets.DsVersionHandler<>() { @Override public Command handleLatest() { - return new GetLatestAccessibleFileMetadataCommand(req, dataFile); + return new GetLatestAccessibleFileMetadataCommand(req, dataFile, includeDeaccessioned); } @Override @@ -512,12 +526,12 @@ public Command handleDraft() { @Override public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedFileMetadataByDatasetVersionCommand(req, dataFile, major, minor); + return new GetSpecificPublishedFileMetadataByDatasetVersionCommand(req, dataFile, major, minor, includeDeaccessioned); } @Override public Command handleLatestPublished() { - return new GetLatestPublishedFileMetadataCommand(req, dataFile); + return new GetLatestPublishedFileMetadataCommand(req, dataFile, includeDeaccessioned); } })); From 48e71ec74a49ad6145f93f38c07b175533642813 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 14:40:48 +0000 Subject: [PATCH 0674/1112] Refactor: DataFile getLatestFileMetadata and getLatestPublishedFileMetadata methods --- .../edu/harvard/iq/dataverse/DataFile.java | 68 +++++++++---------- 1 file changed, 31 insertions(+), 37 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 3d8086b142b..8f2e0d4261e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -549,57 +549,51 @@ public void setDescription(String description) { public FileMetadata getFileMetadata() { return getLatestFileMetadata(); } - + public FileMetadata getLatestFileMetadata() { - FileMetadata fmd = null; + FileMetadata resultFileMetadata = null; - // for newly added or harvested, just return the one fmd if (fileMetadatas.size() == 1) { return fileMetadatas.get(0); } - + for 
(FileMetadata fileMetadata : fileMetadatas) { - // if it finds a draft, return it if (fileMetadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) { return fileMetadata; - } - - // otherwise return the one with the latest version number - // duplicate logic in getLatestPublishedFileMetadata() - if (fmd == null || fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber() ) > 0 ) { - fmd = fileMetadata; - } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&& - ( fileMetadata.getDatasetVersion().getMinorVersionNumber().compareTo( fmd.getDatasetVersion().getMinorVersionNumber()) > 0 ) ) { - fmd = fileMetadata; } + resultFileMetadata = getTheNewerFileMetadata(resultFileMetadata, fileMetadata); } - return fmd; + + return resultFileMetadata; } - -// //Returns null if no published version + public FileMetadata getLatestPublishedFileMetadata() throws UnsupportedOperationException { - FileMetadata fmd = null; - - for (FileMetadata fileMetadata : fileMetadatas) { - // if it finds a draft, skip - if (fileMetadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) { - continue; - } - - // otherwise return the one with the latest version number - // duplicate logic in getLatestFileMetadata() - if (fmd == null || fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber() ) > 0 ) { - fmd = fileMetadata; - } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&& - ( fileMetadata.getDatasetVersion().getMinorVersionNumber().compareTo( fmd.getDatasetVersion().getMinorVersionNumber()) > 0 ) ) { - fmd = fileMetadata; - } - } - if(fmd == null) { + FileMetadata resultFileMetadata = fileMetadatas.stream() + .filter(metadata -> !metadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) + .reduce(null, this::getTheNewerFileMetadata); + + if (resultFileMetadata == null) { throw new UnsupportedOperationException("No published metadata version for DataFile " + this.getId()); } - return fmd; + return resultFileMetadata; + } + + private FileMetadata getTheNewerFileMetadata(FileMetadata currentFileMetadata, FileMetadata newFileMetadata) { + if (currentFileMetadata == null) { + return newFileMetadata; + } + + DatasetVersion currentVersion = currentFileMetadata.getDatasetVersion(); + DatasetVersion newVersion = newFileMetadata.getDatasetVersion(); + + if (newVersion.getVersionNumber().compareTo(currentVersion.getVersionNumber()) > 0 || + (newVersion.getVersionNumber().compareTo(currentVersion.getVersionNumber()) == 0 && + newVersion.getMinorVersionNumber().compareTo(currentVersion.getMinorVersionNumber()) > 0)) { + return newFileMetadata; + } + + return currentFileMetadata; } /** @@ -610,7 +604,7 @@ public long getFilesize() { if (this.filesize == null) { // -1 means "unknown" return -1; - } + } return this.filesize; } From 227fe53ba707e24d89c3e045337cf5e3860a9820 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 14 Feb 2024 15:57:47 +0000 Subject: [PATCH 0675/1112] Refactor: using DatasetVersion.compareByVersion in getTheNewerFileMetadata --- .../edu/harvard/iq/dataverse/DataFile.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 8f2e0d4261e..818cade1eef 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -579,21 +579,19 @@ public FileMetadata getLatestPublishedFileMetadata() throws UnsupportedOperation return resultFileMetadata; } - private FileMetadata getTheNewerFileMetadata(FileMetadata currentFileMetadata, FileMetadata newFileMetadata) { - if (currentFileMetadata == null) { - return newFileMetadata; + private FileMetadata getTheNewerFileMetadata(FileMetadata current, FileMetadata candidate) { + if (current == null) { + return candidate; } - DatasetVersion currentVersion = currentFileMetadata.getDatasetVersion(); - DatasetVersion newVersion = newFileMetadata.getDatasetVersion(); + DatasetVersion currentVersion = current.getDatasetVersion(); + DatasetVersion candidateVersion = candidate.getDatasetVersion(); - if (newVersion.getVersionNumber().compareTo(currentVersion.getVersionNumber()) > 0 || - (newVersion.getVersionNumber().compareTo(currentVersion.getVersionNumber()) == 0 && - newVersion.getMinorVersionNumber().compareTo(currentVersion.getMinorVersionNumber()) > 0)) { - return newFileMetadata; + if (DatasetVersion.compareByVersion.compare(candidateVersion, currentVersion) > 0) { + return candidate; } - return currentFileMetadata; + return current; } /** From 1dc4825cb1aa2f204958782e238ad77ac4e231b6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 13:19:13 -0500 Subject: [PATCH 0676/1112] update perms --- .../edu/harvard/iq/dataverse/FilePage.java | 39 +++++++++++-------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 4e5843964e7..37798f1cd3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -484,7 +484,7 @@ public String restrictFile(boolean restricted) throws CommandException{ public String ingestFile() throws CommandException{ User u = session.getUser(); - if(!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) { + if(!u.isAuthenticated() || !u.isSuperuser()) { //Shouldn't happen (choice not displayed for users who don't have the right permission), but check anyway logger.warning("User: " + u.getIdentifier() + " tried to ingest a file"); JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning")); @@ -544,23 +544,29 @@ public String ingestFile() throws CommandException{ } public String uningestFile() throws CommandException { - + if (!file.isTabularData()) { - if(file.isIngestProblem()) { - User u = session.getUser(); - if(!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) { - logger.warning("User: " + u.getIdentifier() + " tried to uningest a file"); - //Shouldn't happen (choice not displayed for users who don't have the right permission), but check anyway - JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); - return null; - } - file.setIngestDone(); - file.setIngestReport(null); + //Ingest never succeeded, either there was a failure or this is not a tabular data file + User u = session.getUser(); + if (!u.isAuthenticated() || !u.isSuperuser()) { + logger.warning("User: " + u.getIdentifier() + " tried to uningest a file"); + // Shouldn't happen (choice not displayed for users who don't have the right + // permission), but check 
anyway + JH.addMessage(FacesMessage.SEVERITY_WARN, + BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + return null; + } + if (file.isIngestProblem()) { + file.setIngestDone(); + file.setIngestReport(null); } else { - JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); - return null; + //Shouldn't happen - got called when there is no tabular data or an ingest problem + JH.addMessage(FacesMessage.SEVERITY_WARN, + BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + return null; } } else { + //Uningest command does it's own check for isSuperuser commandEngine.submit(new UningestFileCommand(dvRequestService.getDataverseRequest(), file)); Long dataFileId = file.getId(); file = datafileService.find(dataFileId); @@ -580,12 +586,11 @@ public String uningestFile() throws CommandException { } } save(); - //Refresh filemetadata with file title, etc. + // Refresh filemetadata with file title, etc. init(); JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("file.uningest.complete")); return returnToDraftVersion(); - } - + } private List filesToBeDeleted = new ArrayList<>(); From f15122615aea9109ed90b2ff5c4e6a4965f8efcf Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 13:19:23 -0500 Subject: [PATCH 0677/1112] add bundle strings --- src/main/java/propertyFiles/Bundle.properties | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index f1c8381816c..42c844e532e 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2203,6 +2203,15 @@ ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. ingest.csv.nullStream=Stream can't be null. +file.ingest=Ingest +file.uningest=Uningest +file.ingest.alreadyIngestedWarning=This file has already been ingested +file.ingest.ingestInProgressWarning=Ingestion of this file is already in progress +file.ingest.cantIngestFileWarning=Ingest not supported for this file type +file.ingest.ingestQueued=Ingestion has been requested +file.ingest.cantUningestFileWarning=This file cannot be uningested +file.uningest.complete=Uningestion of this file has been completed + # editdatafile.xhtml # editFilesFragment.xhtml From 14b280cf39dee856dfa9a1a6e97f1a7392418a02 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 13:19:33 -0500 Subject: [PATCH 0678/1112] doc updates --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ .../source/user/tabulardataingest/ingestprocess.rst | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index dbe769e2fd1..8cfa5deb96c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2854,6 +2854,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true "https://demo.dataverse.org/api/files/:persistentId/restrict?persistentId=doi:10.5072/FK2/AAA000" +.. 
_file-uningest: + Uningest a File ~~~~~~~~~~~~~~~ @@ -2891,6 +2893,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/uningest?persistentId=doi:10.5072/FK2/AAA000" +.. _file-reingest: + Reingest a File ~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index 9e82ff12b9b..ac5fb5af4ec 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -66,11 +66,11 @@ This is non-fatal. The Dataverse software will not produce a .tab version of the who can see the draft version of the dataset containing the file that will indicate why ingest failed. When the file is published as part of the dataset, there will be no indication that ingest was attempted and failed. -If the warning message is a concern, the Dataverse software includes both an API call (see the Files section of the :doc:`/api/native-api` guide) +If the warning message is a concern, the Dataverse software includes both an API call (see :ref:`file-uningest` in the :doc:`/api/native-api` guide) and an Edit/Uningest menu option displayed on the file page, that allow a file to be Uningested. These are only available to superusers. Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. This will remove the .tab version of the file that was generated. If a file is a tabular format but was never ingested, e.g. due to the ingest file size limit being lower in the past, or if ingest had failed, -e.g. in a prior Dataverse version, an reingest API (see the Files section of the :doc:`/api/native-api` guide) and a file page Edit/Reingest option +e.g. in a prior Dataverse version, a reingest API (see :ref:`file-reingest` in the :doc:`/api/native-api` guide) and a file page Edit/Reingest option in the user interface allow ingest to be tried again. As with Uningest, this functionality is only available to superusers. From 5dffe36c793fa25b9ee8199fda2104fb26f92b9a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 13:21:09 -0500 Subject: [PATCH 0679/1112] Apply suggestions from code review Co-authored-by: Philip Durbin --- .../source/user/tabulardataingest/ingestprocess.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index ac5fb5af4ec..4dce441de4a 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -61,13 +61,13 @@ Uningest and Reingest ===================== Ingest will only work for files whose content can be interpreted as a table. -Multi-sheets spreadsheets and CSV files with different number of entries per row are two examples where ingest will fail. +Multi-sheet spreadsheets and CSV files with a different number of entries per row are two examples where ingest will fail. This is non-fatal. The Dataverse software will not produce a .tab version of the file and will show a warning to users who can see the draft version of the dataset containing the file that will indicate why ingest failed.
When the file is published as part of the dataset, there will be no indication that ingest was attempted and failed. If the warning message is a concern, the Dataverse software includes both an API call (see :ref:`file-uningest` in the :doc:`/api/native-api` guide) -and an Edit/Uningest menu option displayed on the file page, that allow a file to be Uningested. These are only available to superusers. +and an Edit/Uningest menu option displayed on the file page, that allow a file to be uningested. These are only available to superusers. Uningest will remove the warning. Uningest can also be done for a file that was successfully ingested. This will remove the .tab version of the file that was generated. From a828fd1ba8cc6d6c01c818ad1a24db7bcbd624e9 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 14 Feb 2024 14:17:32 -0500 Subject: [PATCH 0680/1112] #10286 add integration tests --- .../edu/harvard/iq/dataverse/api/Files.java | 1 - .../iq/dataverse/util/json/JsonPrinter.java | 3 +- .../iq/dataverse/api/DataversesIT.java | 28 ++++++++ .../edu/harvard/iq/dataverse/api/FilesIT.java | 69 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 16 +++++ 5 files changed, 115 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 155d8953d15..4fd66d45e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -514,7 +514,6 @@ public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathPara @Path("{id}") public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") Boolean returnOwners) throws WrappedResponse, Exception { Boolean includeOwners = returnOwners == null ? 
false : returnOwners; - System.out.print("includeOwners: " + includeOwners); return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false, includeOwners); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 6f750eaddac..d64f77b3526 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -335,7 +335,8 @@ public static JsonArrayBuilder getOwnersFromDvObject(DvObject dvObject) { } if (dvo.isInstanceofDataset() || dvo.isInstanceofDataFile() ){ if (dvo.getIdentifier() != null){ - ownerObject.add("identifier", dvo.getIdentifier()); + Dataset ds = (Dataset) dvo; + ownerObject.add("identifier", ds.getGlobalId().asString()); } else { ownerObject.add("identifier", dvo.getId()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 78ece6ecc42..e41793a10d5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -145,6 +145,34 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.prettyPrint(); deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + + + @Test + public void testGetDataverseOwners() throws FileNotFoundException { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response createDataverse1Response = UtilIT.createRandomDataverse(apiToken); + + createDataverse1Response.prettyPrint(); + createDataverse1Response.then().assertThat().statusCode(CREATED.getStatusCode()); + + String first = UtilIT.getAliasFromResponse(createDataverse1Response); + + Response getWithOwnersFirst = UtilIT.getDataverseWithOwners(first, apiToken, true); + getWithOwnersFirst.prettyPrint(); + + Response createLevel1a = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-level1a", null, apiToken, first); + createLevel1a.prettyPrint(); + String level1a = UtilIT.getAliasFromResponse(createLevel1a); + + Response getWithOwners = UtilIT.getDataverseWithOwners(level1a, apiToken, true); + getWithOwners.prettyPrint(); + + getWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(first)); + + } /** * A regular user can create a Dataverse Collection and access its diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index cfc6f9335b3..fe9985115e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1465,6 +1465,75 @@ public void testGetFileInfo() { assertEquals(200, deleteUserResponse.getStatusCode()); } + @Test + public void testGetFileOwners() { + Response createUser = UtilIT.createRandomUser(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + String dataverseAlias = createDataverseGetAlias(apiToken); + + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = 
JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); + + createUser = UtilIT.createRandomUser(); + String apiTokenRegular = UtilIT.getApiTokenFromResponse(createUser); + + msg("Add a non-tabular file"); + String pathToFile = "scripts/search/data/binary/trees.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + msgt("datafile id: " + dataFileId); + + addResponse.prettyPrint(); + + Response getFileDataResponse = UtilIT.getFileWithOwners(dataFileId, apiToken, true); + + getFileDataResponse.prettyPrint(); + getFileDataResponse.then().assertThat() + .body("data.label", equalTo("trees.png")) + .body("data.dataFile.filename", equalTo("trees.png")) + .body("data.dataFile.contentType", equalTo("image/png")) + .body("data.dataFile.filesize", equalTo(8361)) + .statusCode(OK.getStatusCode()); + + getFileDataResponse.then().assertThat().body("data.dataFile.ownerArray[0].identifier", equalTo(datasetPid)); + + // ------------------------- + // Publish dataverse and dataset + // ------------------------- + msg("Publish dataverse and dataset"); + Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + publishDataversetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + //regular user should get to see file data + getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular); + getFileDataResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + //cleanup + /* + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(username); + assertEquals(200, deleteUserResponse.getStatusCode()); + */ + + } + @Test public void testValidateDDI_issue6027() throws InterruptedException { msgt("testValidateDDI_issue6027"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 0598bb80ea6..0847aea1d37 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1485,6 +1485,22 @@ static Response getDatasetWithOwners(String persistentId, String apiToken, bool + persistentId + (returnOwners ? "&returnOwners=true" : "")); } + + static Response getFileWithOwners(String datafileId, String apiToken, boolean returnOwners) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + + datafileId + + (returnOwners ? "/?returnOwners=true" : "")); + } + + static Response getDataverseWithOwners(String alias, String apiToken, boolean returnOwners) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/dataverses/" + + alias + + (returnOwners ? 
"/?returnOwners=true" : "")); + } static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { return given() From 923f02ef4a9aaff8c55f0f6ebc2dbfecad717246 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 14 Feb 2024 14:57:17 -0500 Subject: [PATCH 0681/1112] #10286 delete test data --- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 15 ++++++++++++++- .../edu/harvard/iq/dataverse/api/FilesIT.java | 4 ++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3703a0d39c3..f4e70e03d45 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1913,8 +1913,21 @@ public void testGetIncludeOwnerArray() { Response getDatasetWithOwners = UtilIT.getDatasetWithOwners(persistentId, apiToken, true); getDatasetWithOwners.prettyPrint(); - getDatasetWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(dataverseAlias)); + getDatasetWithOwners.then().assertThat().body("data.ownerArray[0].identifier", equalTo(dataverseAlias)); + + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(username); + assertEquals(200, deleteUserResponse.getStatusCode()); + } + + + /** * In order for this test to pass you must have the Data Capture Module ( diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index fe9985115e5..7ac43bbae94 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1521,7 +1521,7 @@ public void testGetFileOwners() { .statusCode(OK.getStatusCode()); //cleanup - /* + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); @@ -1530,7 +1530,7 @@ public void testGetFileOwners() { Response deleteUserResponse = UtilIT.deleteUser(username); assertEquals(200, deleteUserResponse.getStatusCode()); - */ + } From 70db48f7b51c1f789674fed74ba39d6c6bed80c4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Feb 2024 16:21:20 -0500 Subject: [PATCH 0682/1112] change to require publish to uningest for a problem --- .../edu/harvard/iq/dataverse/FilePage.java | 20 ++++++++++--------- .../webapp/file-edit-button-fragment.xhtml | 2 +- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 37798f1cd3c..909a616a4a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -547,16 +547,17 @@ public String uningestFile() throws CommandException { if (!file.isTabularData()) { //Ingest never succeeded, either there was a failure or this is not a tabular data file - User u = session.getUser(); - if (!u.isAuthenticated() || !u.isSuperuser()) { - logger.warning("User: " + u.getIdentifier() + " tried to uningest a file"); - // Shouldn't happen (choice not displayed for users who don't have the right - // 
permission), but check anyway - JH.addMessage(FacesMessage.SEVERITY_WARN, - BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); - return null; - } if (file.isIngestProblem()) { + //We allow anyone who can publish to uningest in order to clear a problem + User u = session.getUser(); + if (!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) { + logger.warning("User: " + u.getIdentifier() + " tried to uningest a file"); + // Shouldn't happen (choice not displayed for users who don't have the right + // permission), but check anyway + JH.addMessage(FacesMessage.SEVERITY_WARN, + BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning")); + return null; + } file.setIngestDone(); file.setIngestReport(null); } else { @@ -566,6 +567,7 @@ public String uningestFile() throws CommandException { return null; } } else { + //Superuser required to uningest after a success //Uningest command does it's own check for isSuperuser commandEngine.submit(new UningestFileCommand(dvRequestService.getDataverseRequest(), file)); Long dataFileId = file.getId(); diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index e08de716cda..fd455521c98 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -79,7 +79,7 @@ - +
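As a rough illustration of how a client exercises the path whose permissions this commit adjusts: the uningest route is the one documented in the native-api.rst hunk earlier in this series, and a minimal REST-assured sketch in the style of the UtilIT test helpers might look like the following. The server URL, API token, and persistent identifier are placeholders; after this change a superuser token is expected for uningesting a successfully ingested tabular file, while publish permission only suffices to clear a failed ingest.

import static io.restassured.RestAssured.given;

import io.restassured.response.Response;

public class UningestSketch {
    public static void main(String[] args) {
        // Placeholders -- substitute a real installation, a superuser API token, and a file PID.
        String serverUrl = "https://demo.dataverse.org";
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";
        String persistentId = "doi:10.5072/FK2/AAA000";

        // POST /api/files/:persistentId/uningest, as documented in native-api.rst above.
        Response response = given()
                .header("X-Dataverse-key", apiToken)
                .post(serverUrl + "/api/files/:persistentId/uningest?persistentId=" + persistentId);

        // 200 OK is expected when the caller has sufficient rights; an error status otherwise.
        response.then().statusCode(200);
    }
}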


    -
    +
    diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index 41e2807c4fd..7f70f28e194 100644 --- a/src/main/webapp/dataverse.xhtml +++ b/src/main/webapp/dataverse.xhtml @@ -283,6 +283,19 @@
    +
    + + #{bundle.pidProviderOption} + + +
    + + + + +
    +
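The test diffs below swap the old DOIServiceBean and HandlenetServiceBean references for the relocated provider classes under edu.harvard.iq.dataverse.pidproviders. As a quick orientation, a minimal sketch of the construction pattern those tests now rely on (values copied from GlobalIdTest; assumes the Dataverse classes are on the classpath):

import edu.harvard.iq.dataverse.GlobalId;
import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider;
import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider;

public class PidConstantsSketch {
    public static void main(String[] args) {
        // DOI protocol and resolver constants now live on AbstractDOIProvider.
        GlobalId doi = new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.5072", "FK2/BYM3IW", "/",
                AbstractDOIProvider.DOI_RESOLVER_URL, null);
        // Handle protocol and resolver constants now live on HandlePidProvider.
        GlobalId hdl = new GlobalId(HandlePidProvider.HDL_PROTOCOL, "1902.1", "111012", "/",
                HandlePidProvider.HDL_RESOLVER_URL, null);
        // asString() renders the canonical form, e.g. doi:10.5072/FK2/BYM3IW.
        System.out.println(doi.asString());
        System.out.println(hdl.asString());
    }
}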
    diff --git a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java index 394f08c6e93..7065e9689e1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java @@ -4,6 +4,8 @@ import org.junit.jupiter.api.Test; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -19,7 +21,7 @@ public class GlobalIdTest { @Test public void testValidDOI() { System.out.println("testValidDOI"); - GlobalId instance = new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", DOIServiceBean.DOI_RESOLVER_URL, null); + GlobalId instance = new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null); assertEquals("doi", instance.getProtocol()); assertEquals("10.5072", instance.getAuthority()); @@ -30,7 +32,7 @@ public void testValidDOI() { @Test public void testValidHandle() { System.out.println("testValidDOI"); - GlobalId instance = new GlobalId(HandlenetServiceBean.HDL_PROTOCOL, "1902.1","111012", "/", HandlenetServiceBean.HDL_RESOLVER_URL, null); + GlobalId instance = new GlobalId(HandlePidProvider.HDL_PROTOCOL, "1902.1","111012", "/", HandlePidProvider.HDL_RESOLVER_URL, null); assertEquals("hdl", instance.getProtocol()); assertEquals("1902.1", instance.getAuthority()); @@ -57,7 +59,7 @@ public void testInject() { System.out.println("testInject (weak test)"); // String badProtocol = "hdl:'Select value from datasetfieldvalue';/ha"; - GlobalId instance = PidUtil.parseAsGlobalID(HandlenetServiceBean.HDL_PROTOCOL, "'Select value from datasetfieldvalue';", "ha"); + GlobalId instance = PidUtil.parseAsGlobalID(HandlePidProvider.HDL_PROTOCOL, "'Select value from datasetfieldvalue';", "ha"); assertNull(instance); //exception.expect(IllegalArgumentException.class); diff --git a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java deleted file mode 100644 index 542d00d0d78..00000000000 --- a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
- */ -package edu.harvard.iq.dataverse; - -import edu.harvard.iq.dataverse.engine.TestCommandContext; -import edu.harvard.iq.dataverse.engine.command.CommandContext; -import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; -import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.MockitoAnnotations; -import org.mockito.junit.jupiter.MockitoExtension; - - -import static org.junit.jupiter.api.Assertions.*; - -/** - * - * @author michael - */ -@ExtendWith(MockitoExtension.class) -public class PersistentIdentifierServiceBeanTest { - - @Mock - private SettingsServiceBean settingsServiceBean; - - @InjectMocks - DOIEZIdServiceBean ezidServiceBean = new DOIEZIdServiceBean(); - @InjectMocks - DOIDataCiteServiceBean dataCiteServiceBean = new DOIDataCiteServiceBean(); - @InjectMocks - FakePidProviderServiceBean fakePidProviderServiceBean = new FakePidProviderServiceBean(); - HandlenetServiceBean hdlServiceBean = new HandlenetServiceBean(); - PermaLinkPidProviderServiceBean permaLinkServiceBean = new PermaLinkPidProviderServiceBean(); - - CommandContext ctxt; - - @BeforeEach - public void setup() { - MockitoAnnotations.initMocks(this); - ctxt = new TestCommandContext(){ - @Override - public HandlenetServiceBean handleNet() { - return hdlServiceBean; - } - - @Override - public DOIDataCiteServiceBean doiDataCite() { - return dataCiteServiceBean; - } - - @Override - public DOIEZIdServiceBean doiEZId() { - return ezidServiceBean; - } - - @Override - public FakePidProviderServiceBean fakePidProvider() { - return fakePidProviderServiceBean; - } - - @Override - public PermaLinkPidProviderServiceBean permaLinkProvider() { - return permaLinkServiceBean; - } - - }; - } - - /** - * Test of getBean method, of class PersistentIdentifierServiceBean. - */ - @Test - public void testGetBean_String_CommandContext_OK() { - ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "EZID"); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("EZID"); - - assertEquals(ezidServiceBean, - GlobalIdServiceBean.getBean("doi", ctxt)); - - ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "DataCite"); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("DataCite"); - - assertEquals(dataCiteServiceBean, - GlobalIdServiceBean.getBean("doi", ctxt)); - - ctxt.settings().setValueForKey(SettingsServiceBean.Key.DoiProvider, "FAKE"); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("FAKE"); - - assertEquals(fakePidProviderServiceBean, - GlobalIdServiceBean.getBean("doi", ctxt)); - - assertEquals(hdlServiceBean, - GlobalIdServiceBean.getBean("hdl", ctxt)); - - assertEquals(permaLinkServiceBean, - GlobalIdServiceBean.getBean("perma", ctxt)); - } - - @Test - public void testGetBean_String_CommandContext_BAD() { - ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "non-existent-provider"); - assertNull(GlobalIdServiceBean.getBean("doi", ctxt)); - - - assertNull(GlobalIdServiceBean.getBean("non-existent-protocol", ctxt)); - } - - /** - * Test of getBean method, of class PersistentIdentifierServiceBean. 
- */ - @Test - public void testGetBean_CommandContext() { - ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "doi"); - ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "EZID"); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("EZID"); - - assertEquals(ezidServiceBean, - GlobalIdServiceBean.getBean("doi", ctxt)); - - ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "hdl"); - assertEquals(hdlServiceBean, - GlobalIdServiceBean.getBean("hdl", ctxt)); - - ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "perma"); - assertEquals(permaLinkServiceBean, - GlobalIdServiceBean.getBean("perma", ctxt)); - } - - -} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 304b0bd0438..c3036deb122 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1217,6 +1217,10 @@ public void testExcludeEmail() { } + @Disabled + /*The identifier generation style is no longer a global, dynamically changeable setting. To make this test work after PR #10234, + * will require configuring a PidProvider that uses this style and creating a collection/dataset that uses that provider. + */ @Test public void testStoredProcGeneratedAsIdentifierGenerationStyle() { // Please note that this test only works if the stored procedure diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java index ad980aa28cd..d173f65757f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java @@ -3,11 +3,11 @@ */ package edu.harvard.iq.dataverse.dataaccess; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.mocks.MocksFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -94,8 +94,8 @@ public static void tearDown() { void testGlobusOverlayIdentifiers() throws IOException { dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", - DOIServiceBean.DOI_RESOLVER_URL, null)); + dataset.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, authority, identifier, "/", + AbstractDOIProvider.DOI_RESOLVER_URL, null)); mDatafile = MocksFactory.makeDataFile(); mDatafile.setOwner(dataset); mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java index 1c371881ba6..2c0e0a5c6b7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java @@ -4,11 +4,11 @@ */ package edu.harvard.iq.dataverse.dataaccess; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.GlobalId; import 
edu.harvard.iq.dataverse.mocks.MocksFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import org.junit.jupiter.api.AfterEach; @@ -50,7 +50,7 @@ public void setUp() { System.setProperty("dataverse.files.file.label", "default"); datafile = MocksFactory.makeDataFile(); dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", DOIServiceBean.DOI_RESOLVER_URL, null)); + dataset.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, authority, identifier, "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); datafile.setOwner(dataset); datafile.setStorageIdentifier("test://" + baseStoreId + "//" + logoPath); diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java index a80adb33b8d..255125189ae 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java @@ -11,8 +11,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; -import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; -import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean; +import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.search.IndexBatchServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; @@ -122,27 +121,7 @@ public DataverseFieldTypeInputLevelServiceBean fieldTypeInputLevels() { } @Override - public DOIEZIdServiceBean doiEZId() { - return null; - } - - @Override - public DOIDataCiteServiceBean doiDataCite() { - return null; - } - - @Override - public FakePidProviderServiceBean fakePidProvider() { - return null; - } - - @Override - public HandlenetServiceBean handleNet() { - return null; - } - - @Override - public PermaLinkPidProviderServiceBean permaLinkProvider() { + public PidProviderFactoryBean pidProviderFactory() { return null; } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index 4fc84f7e72d..8ebdeea6243 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -7,12 +7,13 @@ import com.google.gson.Gson; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.HandlenetServiceBean; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; import edu.harvard.iq.dataverse.export.openaire.OpenAireExportUtil; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; + import java.io.IOException; import java.io.StringWriter; import java.nio.charset.StandardCharsets; @@ -56,7 +57,7 @@ public void testWriteIdentifierElementDoi() throws XMLStreamException { String persistentAgency = "doi"; String persistentAuthority = "10.123"; String persistentId = "123"; - GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, 
DOIServiceBean.DOI_RESOLVER_URL, null); + GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, AbstractDOIProvider.DOI_RESOLVER_URL, null); // when OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.asURL(), null); @@ -76,7 +77,7 @@ public void testWriteIdentifierElementHandle() throws XMLStreamException { String persistentAgency = "hdl"; String persistentAuthority = "1902.1"; String persistentId = "111012"; - GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, HandlenetServiceBean.HDL_RESOLVER_URL, null); + GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, HandlePidProvider.HDL_RESOLVER_URL, null); // when OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.asURL(), null); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index 6f0132e2bc9..639a7c542c4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.externaltools; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; @@ -9,6 +8,7 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -267,7 +267,7 @@ public void testDatasetConfigureTool() { .build().toString()); var dataset = new Dataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.5072", "ABC123", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + dataset.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.5072", "ABC123", null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); ApiToken nullApiToken = null; String nullLocaleCode = "en"; var externalToolHandler = new ExternalToolHandler(externalTool, dataset, nullApiToken, nullLocaleCode); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java index 4f5af8b97b0..bb39aecfa79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.externaltools; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.DataTable; @@ -9,6 +8,7 @@ import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.util.URLTokenUtil; import java.util.ArrayList; @@ -144,7 +144,7 @@ public void testParseAddFileToolFilePid() { assertEquals("explorer", externalTool.getToolName()); DataFile dataFile = new 
DataFile(); dataFile.setId(42l); - dataFile.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2/RMQT6J/G9F1A1", "/", DOIServiceBean.DOI_RESOLVER_URL, null)); + dataFile.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/RMQT6J/G9F1A1", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); FileMetadata fmd = new FileMetadata(); fmd.setId(2L); DatasetVersion dv = new DatasetVersion(); diff --git a/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java index 56f8731b9c8..095e798f229 100644 --- a/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java @@ -13,7 +13,6 @@ import org.mockito.Mock; import org.mockito.Mockito; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.GlobalId; @@ -21,6 +20,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore; import edu.harvard.iq.dataverse.mocks.MocksFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.json.JsonObject; @@ -52,8 +52,8 @@ public void setUp() { "d7c42580-6538-4605-9ad8-116a61982644/hdc1"); dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", - DOIServiceBean.DOI_RESOLVER_URL, null)); + dataset.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, authority, identifier, "/", + AbstractDOIProvider.DOI_RESOLVER_URL, null)); mDatafile = MocksFactory.makeDataFile(); mDatafile.setOwner(dataset); mDatafile.setStorageIdentifier("globusm://" + baseStoreId1); diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java index dabc7f68fce..dc226d2e85b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java @@ -1,18 +1,43 @@ package edu.harvard.iq.dataverse.pidproviders; -import edu.harvard.iq.dataverse.DOIServiceBean; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.doi.UnmanagedDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.doi.datacite.DataCiteDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.doi.datacite.DataCiteProviderFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.ezid.EZIdDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.doi.ezid.EZIdProviderFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.fake.FakeDOIProvider; +import edu.harvard.iq.dataverse.pidproviders.doi.fake.FakeProviderFactory; +import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; +import edu.harvard.iq.dataverse.pidproviders.handle.HandleProviderFactory; +import edu.harvard.iq.dataverse.pidproviders.handle.UnmanagedHandlePidProvider; +import edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider; +import edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkProviderFactory; +import 
edu.harvard.iq.dataverse.pidproviders.perma.UnmanagedPermaLinkPidProvider; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; + import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import jakarta.json.Json; import jakarta.json.JsonObjectBuilder; import jakarta.ws.rs.NotFoundException; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; @@ -24,32 +49,128 @@ import static org.junit.jupiter.api.Assertions.*; -/** - * Useful for testing but requires DataCite credentials, etc. - */ + @ExtendWith(MockitoExtension.class) +@LocalJvmSettings +//Perma 1 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "perma 1", varArgs = "perma1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = PermaLinkPidProvider.TYPE, varArgs = "perma1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "DANSLINK", varArgs = "perma1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "QE", varArgs = "perma1") +@JvmSetting(key = JvmSettings.PERMALINK_SEPARATOR, value = "-", varArgs = "perma1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_EXCLUDED_LIST, value = "perma:DANSLINKQE123456, perma:bad, perma:LINKIT123456", varArgs ="perma1") + +//Perma 2 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "perma 2", varArgs = "perma2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = PermaLinkPidProvider.TYPE, varArgs = "perma2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "DANSLINK", varArgs = "perma2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "QE", varArgs = "perma2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "perma:LINKIT/FK2ABCDEF", varArgs ="perma2") +@JvmSetting(key = JvmSettings.PERMALINK_SEPARATOR, value = "/", varArgs = "perma2") +@JvmSetting(key = JvmSettings.PERMALINK_BASE_URL, value = "https://example.org/123", varArgs = "perma2") +// Datacite 1 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "dataCite 1", varArgs = "dc1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = DataCiteDOIProvider.TYPE, varArgs = "dc1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5073", varArgs = "dc1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK2", varArgs = "dc1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_EXCLUDED_LIST, value = "doi:10.5073/FK2123456", varArgs ="dc1") +@JvmSetting(key = JvmSettings.DATACITE_MDS_API_URL, value = "https://mds.test.datacite.org/", varArgs = "dc1") +@JvmSetting(key = JvmSettings.DATACITE_REST_API_URL, value = "https://api.test.datacite.org", varArgs ="dc1") +@JvmSetting(key = JvmSettings.DATACITE_USERNAME, value = "test", varArgs ="dc1") +@JvmSetting(key = JvmSettings.DATACITE_PASSWORD, value = "changeme", varArgs ="dc1") +//Datacite 2 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "dataCite 2", varArgs = "dc2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = DataCiteDOIProvider.TYPE, varArgs = "dc2") +@JvmSetting(key = 
JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5072", varArgs = "dc2") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK3", varArgs = "dc2") +@JvmSetting(key = JvmSettings.DATACITE_MDS_API_URL, value = "https://mds.test.datacite.org/", varArgs = "dc2") +@JvmSetting(key = JvmSettings.DATACITE_REST_API_URL, value = "https://api.test.datacite.org", varArgs ="dc2") +@JvmSetting(key = JvmSettings.DATACITE_USERNAME, value = "test2", varArgs ="dc2") +@JvmSetting(key = JvmSettings.DATACITE_PASSWORD, value = "changeme2", varArgs ="dc2") +//EZID 1 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "EZId 1", varArgs = "ez1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = EZIdDOIProvider.TYPE, varArgs = "ez1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5072", varArgs = "ez1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK2", varArgs = "ez1") +@JvmSetting(key = JvmSettings.EZID_API_URL, value = "https://ezid.cdlib.org/", varArgs = "ez1") +@JvmSetting(key = JvmSettings.EZID_USERNAME, value = "apitest", varArgs ="ez1") +@JvmSetting(key = JvmSettings.EZID_PASSWORD, value = "apitest", varArgs ="ez1") +//FAKE 1 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "FAKE 1", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = FakeDOIProvider.TYPE, varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5074", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "doi:10.5073/FK3ABCDEF", varArgs ="fake1") + +//HANDLE 1 +@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "HDL 1", varArgs = "hdl1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = HandlePidProvider.TYPE, varArgs = "hdl1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "20.500.1234", varArgs = "hdl1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "", varArgs = "hdl1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "hdl:20.20.20/FK2ABCDEF", varArgs ="hdl1") +@JvmSetting(key = JvmSettings.HANDLENET_AUTH_HANDLE, value = "20.500.1234/ADMIN", varArgs ="hdl1") +@JvmSetting(key = JvmSettings.HANDLENET_INDEPENDENT_SERVICE, value = "true", varArgs ="hdl1") +@JvmSetting(key = JvmSettings.HANDLENET_INDEX, value = "1", varArgs ="hdl1") +@JvmSetting(key = JvmSettings.HANDLENET_KEY_PASSPHRASE, value = "passphrase", varArgs ="hdl1") +@JvmSetting(key = JvmSettings.HANDLENET_KEY_PATH, value = "/tmp/cred", varArgs ="hdl1") + +//List to instantiate +@JvmSetting(key = JvmSettings.PID_PROVIDERS, value = "perma1, perma2, dc1, dc2, ez1, fake1, hdl1") + public class PidUtilTest { + @Mock private SettingsServiceBean settingsServiceBean; - @InjectMocks - private PermaLinkPidProviderServiceBean p = new PermaLinkPidProviderServiceBean(); - + @BeforeAll + //FWIW @JvmSetting doesn't appear to work with @BeforeAll + public static void setUpClass() throws Exception { + + //This mimics the initial config in the PidProviderFactoryBean.loadProviderFactories method - could potentially be used to mock that bean at some point + Map pidProviderFactoryMap = new HashMap<>(); + pidProviderFactoryMap.put(PermaLinkPidProvider.TYPE, new PermaLinkProviderFactory()); + pidProviderFactoryMap.put(DataCiteDOIProvider.TYPE, new DataCiteProviderFactory()); + pidProviderFactoryMap.put(HandlePidProvider.TYPE, new HandleProviderFactory()); + 
pidProviderFactoryMap.put(FakeDOIProvider.TYPE, new FakeProviderFactory()); + pidProviderFactoryMap.put(EZIdDOIProvider.TYPE, new EZIdProviderFactory()); + + PidUtil.clearPidProviders(); + + //Read list of providers to add + List providers = Arrays.asList(JvmSettings.PID_PROVIDERS.lookup().split(",\\s")); + //Iterate through the list of providers and add them using the PidProviderFactory of the appropriate type + for (String providerId : providers) { + System.out.println("Loading provider: " + providerId); + String type = JvmSettings.PID_PROVIDER_TYPE.lookup(providerId); + PidProviderFactory factory = pidProviderFactoryMap.get(type); + PidUtil.addToProviderList(factory.createPidProvider(providerId)); + } + PidUtil.addAllToUnmanagedProviderList(Arrays.asList(new UnmanagedDOIProvider(), + new UnmanagedHandlePidProvider(), new UnmanagedPermaLinkPidProvider())); + } + + @AfterAll + public static void tearDownClass() throws Exception { + PidUtil.clearPidProviders(); + } + @BeforeEach public void initMocks() { MockitoAnnotations.initMocks(this); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn("perma"); - Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("DANSLINK"); - p.reInit(); +// Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn("perma"); +// Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("DANSLINK"); } + /** + * Useful for testing but requires DataCite credentials, etc. + */ @Disabled @Test public void testGetDoi() throws IOException { String username = System.getenv("DataCiteUsername"); String password = System.getenv("DataCitePassword"); String baseUrl = "https://api.test.datacite.org"; - GlobalId pid = new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.70122","QE5A-XN55", "/", DOIServiceBean.DOI_RESOLVER_URL, null); + GlobalId pid = new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.70122","QE5A-XN55", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null); try { JsonObjectBuilder result = PidUtil.queryDoi(pid, baseUrl, username, password); String out = JsonUtil.prettyPrint(result.build()); @@ -58,23 +179,291 @@ public void testGetDoi() throws IOException { System.out.println("ex: " + ex); } } + @Test - public void testGetPermaLink() throws IOException { - List list = new ArrayList(); + public void testFactories() throws IOException { + PidProvider p = PidUtil.getPidProvider("perma1"); + assertEquals("perma 1", p.getLabel()); + assertEquals(PermaLinkPidProvider.PERMA_PROTOCOL, p.getProtocol()); + assertEquals("DANSLINK", p.getAuthority()); + assertEquals("QE", p.getShoulder()); + assertEquals("-", p.getSeparator()); + assertTrue(p.getUrlPrefix().startsWith(SystemConfig.getDataverseSiteUrlStatic())); + p = PidUtil.getPidProvider("perma2"); + assertTrue(p.getUrlPrefix().startsWith("https://example.org/123")); + p = PidUtil.getPidProvider("dc2"); + assertEquals("FK3", p.getShoulder()); + } + + @Test + public void testPermaLinkParsing() throws IOException { + //Verify that we can parse a valid perma link associated with perma1 + String pid1String = "perma:DANSLINK-QE-5A-XN55"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + //Check that it was parsed by perma1 and that the URL is correct, etc + assertEquals("perma1", pid2.getProviderId()); + assertEquals(SystemConfig.getDataverseSiteUrlStatic() + "/citation?persistentId=" + pid1String, pid2.asURL()); + 
assertEquals("DANSLINK", pid2.getAuthority()); + assertEquals(PermaLinkPidProvider.PERMA_PROTOCOL, pid2.getProtocol()); + + //Verify that parsing the URL form works + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals("perma1", pid3.getProviderId()); - list.add(p); - PidUtil.addAllToProviderList(list); - GlobalId pid = new GlobalId(PermaLinkPidProviderServiceBean.PERMA_PROTOCOL,"DANSLINK","QE5A-XN55", "", p.getUrlPrefix(), PermaLinkPidProviderServiceBean.PERMA_PROVIDER_NAME); - System.out.println(pid.asString()); - System.out.println(pid.asURL()); + //Repeat the basics with a permalink associated with perma2 + String pid4String = "perma:DANSLINK/QE-5A-XN55"; + GlobalId pid5 = PidUtil.parseAsGlobalID(pid4String); + assertEquals("perma2", pid5.getProviderId()); + assertEquals(pid4String, pid5.asString()); + assertEquals("https://example.org/123/citation?persistentId=" + pid4String, pid5.asURL()); + + } + + @Test + public void testDOIParsing() throws IOException { - GlobalId pid2 = PidUtil.parseAsGlobalID(pid.asString()); - assertEquals(pid.asString(), pid2.asString()); - GlobalId pid3 = PidUtil.parseAsGlobalID(pid.asURL()); - assertEquals(pid.asString(), pid3.asString()); + String pid1String = "doi:10.5073/FK2ABCDEF"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + assertEquals("dc1", pid2.getProviderId()); + assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); + assertEquals("10.5073", pid2.getAuthority()); + assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals("dc1", pid3.getProviderId()); + + String pid4String = "doi:10.5072/FK3ABCDEF"; + GlobalId pid4 = PidUtil.parseAsGlobalID(pid4String); + assertEquals(pid4String, pid4.asString()); + assertEquals("dc2", pid4.getProviderId()); + + String pid5String = "doi:10.5072/FK2ABCDEF"; + GlobalId pid5 = PidUtil.parseAsGlobalID(pid5String); + assertEquals(pid5String, pid5.asString()); + assertEquals("ez1", pid5.getProviderId()); + String pid6String = "doi:10.5074/FKABCDEF"; + GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); + assertEquals(pid6String, pid6.asString()); + assertEquals("fake1", pid6.getProviderId()); + + + } + + @Test + public void testHandleParsing() throws IOException { + + String pid1String = "hdl:20.500.1234/10052"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + assertEquals("hdl1", pid2.getProviderId()); + assertEquals("https://hdl.handle.net/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); + assertEquals("20.500.1234", pid2.getAuthority()); + assertEquals(HandlePidProvider.HDL_PROTOCOL, pid2.getProtocol()); + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals("hdl1", pid3.getProviderId()); } + @Test + public void testUnmanagedParsing() throws IOException { + // A handle managed not managed in the hdl1 provider + String pid1String = "hdl:20.500.3456/10052"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + //Only parsed by the unmanaged provider + assertEquals(UnmanagedHandlePidProvider.ID, pid2.getProviderId()); + 
assertEquals(HandlePidProvider.HDL_RESOLVER_URL + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); + assertEquals("20.500.3456", pid2.getAuthority()); + assertEquals(HandlePidProvider.HDL_PROTOCOL, pid2.getProtocol()); + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals(UnmanagedHandlePidProvider.ID, pid3.getProviderId()); + + //Same for DOIs + String pid5String = "doi:10.6083/FK2ABCDEF"; + GlobalId pid5 = PidUtil.parseAsGlobalID(pid5String); + assertEquals(pid5String, pid5.asString()); + assertEquals(UnmanagedDOIProvider.ID, pid5.getProviderId()); + + //And Permalinks + String pid6String = "perma:NOTDANSQEABCDEF"; + GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); + assertEquals(pid6String, pid6.asString()); + assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId()); + + } + + @Test + public void testExcludedSetParsing() throws IOException { + + String pid1String = "doi:10.5073/FK2123456"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + assertEquals(UnmanagedDOIProvider.ID, pid2.getProviderId()); + assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); + assertEquals("10.5073", pid2.getAuthority()); + assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals(UnmanagedDOIProvider.ID, pid3.getProviderId()); + + String pid4String = "perma:bad"; + GlobalId pid4 = PidUtil.parseAsGlobalID(pid4String); + assertEquals(pid4String, pid4.asString()); + assertEquals(UnmanagedPermaLinkPidProvider.ID, pid4.getProviderId()); + + String pid5String = "perma:DANSLINKQE123456"; + GlobalId pid5 = PidUtil.parseAsGlobalID(pid5String); + assertEquals(pid5String, pid5.asString()); + assertEquals(UnmanagedPermaLinkPidProvider.ID, pid5.getProviderId()); + + String pid6String = "perma:LINKIT123456"; + GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); + assertEquals(pid6String, pid6.asString()); + assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId()); + + + } + + @Test + public void testManagedSetParsing() throws IOException { + + String pid1String = "doi:10.5073/FK3ABCDEF"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + assertEquals("fake1", pid2.getProviderId()); + assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); + assertEquals("10.5073", pid2.getAuthority()); + assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); + GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); + assertEquals(pid1String, pid3.asString()); + assertEquals("fake1", pid3.getProviderId()); + assertFalse(PidUtil.getPidProvider(pid3.getProviderId()).canCreatePidsLike(pid3)); + + String pid4String = "hdl:20.20.20/FK2ABCDEF"; + GlobalId pid4 = PidUtil.parseAsGlobalID(pid4String); + assertEquals(pid4String, pid4.asString()); + assertEquals("hdl1", pid4.getProviderId()); + assertFalse(PidUtil.getPidProvider(pid4.getProviderId()).canCreatePidsLike(pid4)); + + String pid5String = "perma:LINKIT/FK2ABCDEF"; + GlobalId pid5 = PidUtil.parseAsGlobalID(pid5String); + assertEquals(pid5String, pid5.asString()); + assertEquals("perma2", 
pid5.getProviderId()); + assertFalse(PidUtil.getPidProvider(pid5.getProviderId()).canCreatePidsLike(pid5)); + } + + @Test + public void testFindingPidGenerators() throws IOException { + + Dataset dataset1 = new Dataset(); + Dataverse dataverse1 = new Dataverse(); + dataset1.setOwner(dataverse1); + String pidGeneratorSpecs = Json.createObjectBuilder().add("protocol", AbstractDOIProvider.DOI_PROTOCOL).add("authority","10.5072").add("shoulder", "FK2").build().toString(); + //Set a PID generator on the parent + dataverse1.setPidGeneratorSpecs(pidGeneratorSpecs); + assertEquals(pidGeneratorSpecs, dataverse1.getPidGeneratorSpecs()); + //Verify that the parent's PID generator is the effective one + assertEquals("ez1", dataverse1.getEffectivePidGenerator().getId()); + assertEquals("ez1", dataset1.getEffectivePidGenerator().getId()); + //Change dataset to have a provider and verify that it is used instead of any effective one + dataset1.setAuthority("10.5073"); + dataset1.setProtocol(AbstractDOIProvider.DOI_PROTOCOL); + dataset1.setIdentifier("FK2ABCDEF"); + //Reset to get rid of cached @transient value + dataset1.setPidGenerator(null); + assertEquals("dc1", dataset1.getGlobalId().getProviderId()); + assertEquals("dc1", dataset1.getEffectivePidGenerator().getId()); + assertTrue(PidUtil.getPidProvider(dataset1.getEffectivePidGenerator().getId()).canCreatePidsLike(dataset1.getGlobalId())); + + dataset1.setPidGenerator(null); + //Now set identifier so that the provider has this one in it's managed list (and therefore we can't mint new PIDs in the same auth/shoulder) and therefore we get the effective pid generator + dataset1.setIdentifier("FK3ABCDEF"); + assertEquals("fake1", dataset1.getGlobalId().getProviderId()); + assertEquals("ez1", dataset1.getEffectivePidGenerator().getId()); + + + + } + + @Test + @JvmSetting(key = JvmSettings.LEGACY_DATACITE_MDS_API_URL, value = "https://mds.test.datacite.org/") + @JvmSetting(key = JvmSettings.LEGACY_DATACITE_REST_API_URL, value = "https://api.test.datacite.org") + @JvmSetting(key = JvmSettings.LEGACY_DATACITE_USERNAME, value = "test2") + @JvmSetting(key = JvmSettings.LEGACY_DATACITE_PASSWORD, value = "changeme2") + public void testLegacyConfig() throws IOException { + MockitoAnnotations.openMocks(this); + Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider)).thenReturn("DataCite"); + Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Shoulder)).thenReturn("FK2"); + + Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn("doi"); + Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("10.5075"); + + + + String protocol = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol); + String authority = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority); + String shoulder = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Shoulder); + String provider = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider); + + if (protocol != null && authority != null && shoulder != null && provider != null) { + // This line is different than in PidProviderFactoryBean because here we've + // already added the unmanaged providers, so we can't look for null + if (!PidUtil.getPidProvider(protocol, authority, shoulder).canManagePID()) { + PidProvider legacy = null; + // Try to add a legacy provider + String identifierGenerationStyle = settingsServiceBean + 
.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "random"); + String dataFilePidFormat = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, + "DEPENDENT"); + switch (provider) { + case "EZID": + /* + * String baseUrl = JvmSettings.PID_EZID_BASE_URL.lookup(String.class); String + * username = JvmSettings.PID_EZID_USERNAME.lookup(String.class); String + * password = JvmSettings.PID_EZID_PASSWORD.lookup(String.class); + * legacy = new EZIdDOIProvider("legacy", "legacy", authority, + * shoulder, identifierGenerationStyle, dataFilePidFormat, "", "", baseUrl, + * username, password); + */ + break; + case "DataCite": + String mdsUrl = JvmSettings.LEGACY_DATACITE_MDS_API_URL.lookup(String.class); + String restUrl = JvmSettings.LEGACY_DATACITE_REST_API_URL.lookup(String.class); + String dcUsername = JvmSettings.LEGACY_DATACITE_USERNAME.lookup(String.class); + String dcPassword = JvmSettings.LEGACY_DATACITE_PASSWORD.lookup(String.class); + if (mdsUrl != null && restUrl != null && dcUsername != null && dcPassword != null) { + legacy = new DataCiteDOIProvider("legacy", "legacy", authority, shoulder, + identifierGenerationStyle, dataFilePidFormat, "", "", mdsUrl, restUrl, dcUsername, + dcPassword); + } + break; + case "FAKE": + System.out.println("Legacy FAKE found"); + legacy = new FakeDOIProvider("legacy", "legacy", authority, shoulder, + identifierGenerationStyle, dataFilePidFormat, "", ""); + break; + } + if (legacy != null) { + // Not testing parts that require this bean + legacy.setPidProviderServiceBean(null); + PidUtil.addToProviderList(legacy); + } + } else { + System.out.println("Legacy PID provider settings found - ignored since a provider for the same protocol, authority, shoulder has been registered"); + } + + } + + String pid1String = "doi:10.5075/FK2ABCDEF"; + GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); + assertEquals(pid1String, pid2.asString()); + assertEquals("legacy", pid2.getProviderId()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteProviderTest.java new file mode 100644 index 00000000000..572fc722272 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteProviderTest.java @@ -0,0 +1,187 @@ +package edu.harvard.iq.dataverse.pidproviders.doi.datacite; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldConstant; +import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeAll; +import 
org.junit.jupiter.api.extension.ExtendWith; + +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; + +@ExtendWith(MockitoExtension.class) +@LocalJvmSettings +@JvmSetting(key = JvmSettings.SITE_URL, value = "https://example.com") + +public class DataCiteProviderTest { + + static DataverseServiceBean dataverseSvc; + static SettingsServiceBean settingsSvc; + static PidProviderFactoryBean pidService; + static final String DEFAULT_NAME = "LibraScholar"; + + @BeforeAll + public static void setupMocks() { + dataverseSvc = Mockito.mock(DataverseServiceBean.class); + settingsSvc = Mockito.mock(SettingsServiceBean.class); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + + // initial values (needed here for other tests where this method is reused!) + Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn(DEFAULT_NAME); + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(DEFAULT_NAME); + + pidService = Mockito.mock(PidProviderFactoryBean.class); + Mockito.when(pidService.isGlobalIdLocallyUnique(any(GlobalId.class))).thenReturn(true); + Mockito.when(pidService.getProducer()).thenReturn("RootDataverse"); + + } + + /** + * Useful for testing but requires DataCite credentials, etc. + * + * To run the test: + * export DataCiteUsername=test2 + * export DataCitePassword=changeme2 + * export DataCiteAuthority=10.5072 + * export DataCiteShoulder=FK2 + * + * then run mvn test -Dtest=DataCiteProviderTest + * + * For each run of the test, one test DOI will be created and will remain in the registered state, as visible on Fabrica at doi.test.datacite.org + * (two DOIs are created, but one is deleted after being created in the draft state and never made findable.) 
+ */ + @Test + @Disabled + public void testDoiLifecycle() throws IOException { + String username = System.getenv("DataCiteUsername"); + String password = System.getenv("DataCitePassword"); + String authority = System.getenv("DataCiteAuthority"); + String shoulder = System.getenv("DataCiteShoulder"); + DataCiteDOIProvider provider = new DataCiteDOIProvider("test", "test", authority, shoulder, "randomString", + SystemConfig.DataFilePIDFormat.DEPENDENT.toString(), "", "", "https://mds.test.datacite.org", + "https://api.test.datacite.org", username, password); + + provider.setPidProviderServiceBean(pidService); + + PidUtil.addToProviderList(provider); + + Dataset d = new Dataset(); + DatasetVersion dv = new DatasetVersion(); + DatasetFieldType primitiveDSFType = new DatasetFieldType(DatasetFieldConstant.title, + DatasetFieldType.FieldType.TEXT, false); + DatasetField testDatasetField = new DatasetField(); + + dv.setVersionState(VersionState.DRAFT); + + testDatasetField.setDatasetVersion(dv); + testDatasetField.setDatasetFieldType(primitiveDSFType); + testDatasetField.setSingleValue("First Title"); + List fields = new ArrayList<>(); + fields.add(testDatasetField); + dv.setDatasetFields(fields); + ArrayList dsvs = new ArrayList<>(); + dsvs.add(0, dv); + d.setVersions(dsvs); + + assertEquals(d.getCurrentName(), "First Title"); + + provider.generatePid(d); + assertEquals(d.getProtocol(), "doi"); + assertEquals(d.getAuthority(), authority); + assertTrue(d.getIdentifier().startsWith(shoulder)); + d.getGlobalId(); + + try { + provider.createIdentifier(d); + d.setIdentifierRegistered(true); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertEquals(DataCiteDOIProvider.DRAFT, provider.getPidStatus(d)); + Map mdMap = provider.getIdentifierMetadata(d); + assertEquals("First Title", mdMap.get("datacite.title")); + + testDatasetField.setSingleValue("Second Title"); + + //Modify called for a draft dataset shouldn't update DataCite (given current code) + try { + provider.modifyIdentifierTargetURL(d); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + //Verify the title hasn't changed + mdMap = provider.getIdentifierMetadata(d); + assertEquals("First Title", mdMap.get("datacite.title")); + //Check our local status + assertEquals(DataCiteDOIProvider.DRAFT, provider.getPidStatus(d)); + //Now delete the identifier + provider.deleteIdentifier(d); + //Causes a 404 and a caught exception that prints a stack trace. + mdMap = provider.getIdentifierMetadata(d); + // And verify the record is gone (no title, should be no entries at all) + assertEquals(null, mdMap.get("datacite.title")); + + //Now recreate and publicize in one step + assertTrue(provider.publicizeIdentifier(d)); + d.getLatestVersion().setVersionState(VersionState.RELEASED); + + //Verify the title hasn't changed + mdMap = provider.getIdentifierMetadata(d); + assertEquals("Second Title", mdMap.get("datacite.title")); + //Check our local status + assertEquals(DataCiteDOIProvider.FINDABLE, provider.getPidStatus(d)); + + //Verify that modify does update a published/findable record + testDatasetField.setSingleValue("Third Title"); + + try { + provider.modifyIdentifierTargetURL(d); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + mdMap = provider.getIdentifierMetadata(d); + assertEquals("Third Title", mdMap.get("datacite.title")); + + //Now delete the identifier . 
Once it's been findable, this should just flip the record to registered + //Not sure that can be easily verified in the test, but it will be visible in Fabrica + provider.deleteIdentifier(d); + d.getLatestVersion().setVersionState(VersionState.DEACCESSIONED); + + mdMap = provider.getIdentifierMetadata(d); + assertEquals("This item has been removed from publication", mdMap.get("datacite.title")); + + //Check our local status - just uses the version state + assertEquals(DataCiteDOIProvider.REGISTERED, provider.getPidStatus(d)); + + // provider.registerWhenPublished() + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index adf48e05f09..92b06e5936f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.Dataverse.DataverseType; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.mocks.MocksFactory; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -134,7 +135,7 @@ private DatasetField constructBoundingBoxValue(String datasetFieldTypeName, Stri private IndexableDataset createIndexableDataset() { final Dataset dataset = MocksFactory.makeDataset(); - dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.666", "FAKE/fake", "/", DOIServiceBean.DOI_RESOLVER_URL, null)); + dataset.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.666", "FAKE/fake", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); final DatasetVersion datasetVersion = dataset.getCreateVersion(null); DatasetField field = createCVVField("language", "English", false); datasetVersion.getDatasetFields().add(field); diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java index 6b03f20fc41..f4494b7116e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java @@ -17,22 +17,15 @@ void lookupSetting() { } @Test - @SystemProperty(key = "doi.username", value = "test") - void lookupSettingViaAlias() { - assertEquals("test", JvmSettings.DATACITE_USERNAME.lookup()); + @SystemProperty(key = "dataverse.pid.datacite.datacite.username", value = "test") + void lookupPidProviderSetting() { + assertEquals("test", JvmSettings.DATACITE_USERNAME.lookup("datacite")); } @Test - @SystemProperty(key = "doi.baseurlstring", value = "test") + @SystemProperty(key = "dataverse.ingest.rserve.port", value = "1234") void lookupSettingViaAliasWithDefaultInMPCFile() { - assertEquals("test", JvmSettings.DATACITE_MDS_API_URL.lookup()); - } - - @Test - @SystemProperty(key = "doi.dataciterestapiurlstring", value = "foo") - @SystemProperty(key = "doi.mdcbaseurlstring", value = "bar") - void lookupSettingViaAliasWithDefaultInMPCFileAndTwoAliases() { - assertEquals("foo", JvmSettings.DATACITE_REST_API_URL.lookup()); + assertEquals("1234", JvmSettings.RSERVE_PORT.lookup()); } } \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index 
41032ffa811..310bec72c2e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -1,11 +1,11 @@ package edu.harvard.iq.dataverse.sitemap; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.util.xml.XmlPrinter; import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; @@ -66,14 +66,14 @@ void testUpdateSiteMap() throws IOException, ParseException, SAXException { List datasets = new ArrayList<>(); Dataset published = new Dataset(); - published.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/published1", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + published.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.666", "FAKE/published1", null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); String publishedPid = published.getGlobalId().asString(); published.setPublicationDate(new Timestamp(new Date().getTime())); published.setModificationTime(new Timestamp(new Date().getTime())); datasets.add(published); Dataset unpublished = new Dataset(); - unpublished.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/unpublished1", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + unpublished.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.666", "FAKE/unpublished1", null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); String unpublishedPid = unpublished.getGlobalId().asString(); Timestamp nullPublicationDateToIndicateNotPublished = null; @@ -81,14 +81,14 @@ void testUpdateSiteMap() throws IOException, ParseException, SAXException { datasets.add(unpublished); Dataset harvested = new Dataset(); - harvested.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/harvested1", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + harvested.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.666", "FAKE/harvested1", null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); String harvestedPid = harvested.getGlobalId().asString(); harvested.setPublicationDate(new Timestamp(new Date().getTime())); harvested.setHarvestedFrom(new HarvestingClient()); datasets.add(harvested); Dataset deaccessioned = new Dataset(); - deaccessioned.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/deaccessioned1", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + deaccessioned.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL, "10.666", "FAKE/deaccessioned1", null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); String deaccessionedPid = deaccessioned.getGlobalId().asString(); deaccessioned.setPublicationDate(new Timestamp(new Date().getTime())); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java index d70a108e7c6..15905c2971b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java @@ -1,12 +1,12 @@ package edu.harvard.iq.dataverse.util; -import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import 
edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; @@ -32,7 +32,7 @@ void testGetToolUrlWithOptionalQueryParameters() { DatasetVersion dv = new DatasetVersion(); Dataset ds = new Dataset(); ds.setId(50L); - ds.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2ABCDEF",null, DOIServiceBean.DOI_RESOLVER_URL, null)); + ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2ABCDEF",null, AbstractDOIProvider.DOI_RESOLVER_URL, null)); dv.setDataset(ds); fmd.setDatasetVersion(dv); List fmdl = new ArrayList<>(); From 4716c7ae18e89d9a3fe602e411953b23c981d5b3 Mon Sep 17 00:00:00 2001 From: luddaniel <83018819+luddaniel@users.noreply.github.com> Date: Wed, 6 Mar 2024 15:38:47 +0100 Subject: [PATCH 0797/1112] Returning to author now requires a reason that is sent by email to the author (#10137) * #3702 - Returning to author now requires a commented reason that is sent by email to the author * Update src/main/java/propertyFiles/Bundle.properties Co-authored-by: Philip Durbin * #3702 - Increased maxlength of message to 2000 + Added email contacts of collection in return to author email * #3702 - Added test on null or empty comment for ReturnDatasetToAuthorCommand * #3702 fixed Unit Tests errors * #3702 - Code commentary to be removed * #3702 - Adding release note * #3702 - Adding last Bundle.properties corrections * #3702 - Updated release note and guide --------- Co-authored-by: Philip Durbin --- doc/release-notes/3702-return-to-author.md | 4 +++ doc/sphinx-guides/source/api/native-api.rst | 3 +- .../edu/harvard/iq/dataverse/DatasetPage.java | 13 ++++++-- .../harvard/iq/dataverse/MailServiceBean.java | 26 ++++++++++------ .../harvard/iq/dataverse/api/Datasets.java | 3 +- .../iq/dataverse/api/Notifications.java | 1 - .../impl/ReturnDatasetToAuthorCommand.java | 5 +++ src/main/java/propertyFiles/Bundle.properties | 13 +++++--- src/main/webapp/dataset.xhtml | 31 ++++++++++--------- src/main/webapp/dataverseuser.xhtml | 3 -- .../ReturnDatasetToAuthorCommandTest.java | 31 +++++++++---------- 11 files changed, 78 insertions(+), 55 deletions(-) create mode 100644 doc/release-notes/3702-return-to-author.md diff --git a/doc/release-notes/3702-return-to-author.md b/doc/release-notes/3702-return-to-author.md new file mode 100644 index 00000000000..aa7dd9feaef --- /dev/null +++ b/doc/release-notes/3702-return-to-author.md @@ -0,0 +1,4 @@ +### Return to author + +Popup for returning to author now requires a reason that will be sent by email to the author. +Please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. 
\ No newline at end of file diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 7f048f96eb9..70d73ae3c98 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2126,7 +2126,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/returnToAuthor?persistentId=doi:10.5072/FK2/J8SJZB" -H "Content-type: application/json" -d @reason-for-return.json -The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is persisted into the database, stored at the dataset version level. +The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is sent by email and is persisted into the database, stored at the dataset version level. +The reason is required, please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. The :ref:`send-feedback` API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.) 
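For readers trying the returnToAuthor call documented above, here is a minimal sketch of the reason-for-return.json payload. The key the endpoint reads is reasonForReturn (see the Datasets.java hunk later in this patch), and an empty or missing value is rejected with 400 Bad Request; the reason text and the API_TOKEN variable are illustrative placeholders, and the demo host and DOI are simply reused from the curl example above, not fixed values.

    reason-for-return.json:
    {
      "reasonForReturn": "Please add a README describing the variables, then resubmit for review."
    }

    # hypothetical invocation, mirroring the documented curl example
    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
      "https://demo.dataverse.org/api/datasets/:persistentId/returnToAuthor?persistentId=doi:10.5072/FK2/J8SJZB" \
      -H "Content-type: application/json" -d @reason-for-return.json

The same reason text is what gets appended to the notification email assembled in the MailServiceBean change below, so it is worth phrasing it as a complete sentence addressed to the author.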
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 88b1f4f49bc..0641039e433 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -709,6 +709,16 @@ public void setNumberOfFilesToShow(Long numberOfFilesToShow) { this.numberOfFilesToShow = numberOfFilesToShow; } + private String returnReason = ""; + + public String getReturnReason() { + return returnReason; + } + + public void setReturnReason(String returnReason) { + this.returnReason = returnReason; + } + public void showAll(){ setNumberOfFilesToShow(new Long(fileMetadatasSearch.size())); } @@ -2653,8 +2663,7 @@ public void edit(EditMode editMode) { public String sendBackToContributor() { try { - //FIXME - Get Return Comment from sendBackToContributor popup - Command cmd = new ReturnDatasetToAuthorCommand(dvRequestService.getDataverseRequest(), dataset, ""); + Command cmd = new ReturnDatasetToAuthorCommand(dvRequestService.getDataverseRequest(), dataset, returnReason); dataset = commandEngine.submit(cmd); JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.reject.success")); } catch (CommandException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 72fc6ee6d64..4b591d240bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -466,18 +466,24 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio case RETURNEDDS: version = (DatasetVersion) targetObject; pattern = BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewer"); - String optionalReturnReason = ""; - /* - FIXME - Setting up to add single comment when design completed - optionalReturnReason = "."; - if (comment != null && !comment.isEmpty()) { - optionalReturnReason = ".\n\n" + BundleUtil.getStringFromBundle("wasReturnedReason") + "\n\n" + comment; - } - */ + String[] paramArrayReturnedDataset = {version.getDataset().getDisplayName(), getDatasetDraftLink(version.getDataset()), - version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner()), optionalReturnReason}; + version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner())}; messageText += MessageFormat.format(pattern, paramArrayReturnedDataset); + + if (comment != null && !comment.isEmpty()) { + messageText += "\n\n" + MessageFormat.format(BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewerReason"), comment); + } + + Dataverse d = (Dataverse) version.getDataset().getOwner(); + List contactEmailList = new ArrayList(); + for (DataverseContact dc : d.getDataverseContacts()) { + contactEmailList.add(dc.getContactEmail()); + } + if (!contactEmailList.isEmpty()) { + String contactEmails = String.join(", ", contactEmailList); + messageText += "\n\n" + MessageFormat.format(BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewer.collectionContacts"), contactEmails); + } return messageText; case WORKFLOW_SUCCESS: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e312d6ec15b..ad66fb468f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2145,9 +2145,8 @@ 
public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; reasonForReturn = json.getString("reasonForReturn"); - // TODO: Once we add a box for the curator to type into, pass the reason for return to the ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side. if (reasonForReturn == null || reasonForReturn.isEmpty()) { - return error(Response.Status.BAD_REQUEST, "You must enter a reason for returning a dataset to the author(s)."); + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.commentNull")); } AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc); Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn )); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java index 37c894d3071..df172f36973 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java @@ -55,7 +55,6 @@ public Response getAllNotificationsForUser(@Context ContainerRequestContext crc) notificationObjectBuilder.add("id", notification.getId()); notificationObjectBuilder.add("type", type.toString()); /* FIXME - Re-add reasons for return if/when they are added to the notifications page. - if (Type.RETURNEDDS.equals(type) || Type.SUBMITTEDDS.equals(type)) { JsonArrayBuilder reasons = getReasonsForReturn(notification); for (JsonValue reason : reasons.build()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java index caf37ad4de1..f3b33f82524 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java @@ -25,6 +25,11 @@ public class ReturnDatasetToAuthorCommand extends AbstractDatasetCommand
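The body of the ReturnDatasetToAuthorCommand hunk above is not reproduced in this excerpt. Judging from the updated ReturnDatasetToAuthorCommandTest further below, which expects an IllegalArgumentException for a null or empty comment, the added lines presumably amount to a constructor guard roughly like the following sketch (the field name and exact placement are assumptions, not the actual patch):

    public ReturnDatasetToAuthorCommand(DataverseRequest aRequest, Dataset anAffectedDataset, String comment) {
        super(aRequest, anAffectedDataset);
        // Fail fast on a missing reason, using the same message as the API and UI checks (assumed sketch).
        if (comment == null || comment.isEmpty()) {
            throw new IllegalArgumentException("You must enter a reason for returning a dataset to the author(s).");
        }
        this.comment = comment; // hypothetical field name for the stored reason
    }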
    You may contact us for support at {0}.

    Thank you,
    {1} notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3} ). @@ -1476,7 +1478,7 @@ dataset.submit.failure.inReview=You cannot submit this dataset for review becaus dataset.status.failure.notallowed=Status update failed - label not allowed dataset.status.failure.disabled=Status labeling disabled for this dataset dataset.status.failure.isReleased=Latest version of dataset is already released. Status can only be set on draft versions -dataset.rejectMessage=Return this dataset to contributor for modification. +dataset.rejectMessage=Return this dataset to contributor for modification. The reason for return entered below will be sent by email to the author. dataset.rejectMessage.label=Return to Author Reason dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). dataset.reject.enterReason.error=Reason for return to author is required. @@ -1484,6 +1486,7 @@ dataset.reject.success=This dataset has been sent back to the contributor. dataset.reject.failure=Dataset Submission Return Failed - {0} dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null. dataset.reject.datasetNotInReview=This dataset cannot be return to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first. +dataset.reject.commentNull=You must enter a reason for returning a dataset to the author(s). dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published. dataset.publish.terms.tip=This version of the dataset will be published with the following terms: dataset.publish.terms.help.tip=To change the terms for this version, click the Cancel button and go to the Terms tab for this dataset. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 4bb1ec869f6..34c6d3dcbea 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1843,27 +1843,30 @@

    #{bundle['dataset.rejectMessage']}

    [dataset.xhtml hunk: the tag markup is not preserved in this excerpt; the popup's static reason text is replaced by a required return-reason input bound to DatasetPage.returnReason, validated with dataset.reject.enterReason.error]
    - + diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 2426cf980d3..9ed8b5209b6 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -178,9 +178,6 @@ #{DataverseUserPage.getRequestorEmail(item)} - - #{DataverseUserPage.getReasonForReturn(item.theObject)} - diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java index 23cc4547bc4..fc52abecaf2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java @@ -155,7 +155,7 @@ public void testReleasedDataset() { String actual = null; Dataset updatedDataset = null; try { - updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, "")); + updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, "Update Your Files, Dummy")); } catch (CommandException ex) { actual = ex.getMessage(); } @@ -171,36 +171,33 @@ public void testNotInReviewDataset() { String actual = null; Dataset updatedDataset = null; try { - updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, "")); + updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, "Update Your Files, Dummy")); } catch (CommandException ex) { actual = ex.getMessage(); } assertEquals(expected, actual); } - /* - FIXME - Empty Comments won't be allowed in future @Test - public void testEmptyComments(){ - - dataset.setIdentifier("DUMMY"); + public void testEmptyOrNullComment(){ dataset.getLatestVersion().setVersionState(DatasetVersion.VersionState.DRAFT); - dataset.getLatestVersion().setInReview(true); - dataset.getLatestVersion().setReturnReason(null); + Dataset updatedDataset = null; String expected = "You must enter a reason for returning a dataset to the author(s)."; String actual = null; - Dataset updatedDataset = null; try { - - updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset)); - } catch (CommandException ex) { + testEngine.submit( new AddLockCommand(dataverseRequest, dataset, + new DatasetLock(DatasetLock.Reason.InReview, dataverseRequest.getAuthenticatedUser()))); + + assertThrowsExactly(IllegalArgumentException.class, + () -> new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, null), expected); + assertThrowsExactly(IllegalArgumentException.class, + () -> new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, ""), expected); + updatedDataset = testEngine.submit(new ReturnDatasetToAuthorCommand(dataverseRequest, dataset, "")); + } catch (IllegalArgumentException | CommandException ex) { actual = ex.getMessage(); } - assertEquals(expected, actual); - - + assertEquals(expected, actual); } - */ @Test public void testAllGood() { From 8e90802dd102ab5bb5b7d2251fa373a5c555bc3d Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 6 Mar 2024 09:58:30 -0500 Subject: [PATCH 0798/1112] Removed some changes after slack conversation --- .../iq/dataverse/api/AbstractApiBean.java | 11 +++----- .../harvard/iq/dataverse/api/Datasets.java | 26 +++++++------------ .../edu/harvard/iq/dataverse/api/Files.java | 5 ++-- .../impl/GetDraftDatasetVersionCommand.java | 2 +- ...raftDatasetVersionUnrestrictedCommand.java | 
26 ------------------- ...LatestAccessibleDatasetVersionCommand.java | 20 +++++--------- 6 files changed, 22 insertions(+), 68 deletions(-) delete mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionUnrestrictedCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index ecdffda8a1b..60e0b79662b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -17,7 +17,6 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionUnrestrictedCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; @@ -397,13 +396,12 @@ protected Dataset findDatasetOrDie(String id) throws WrappedResponse { } } - protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, - boolean includeDeaccessioned, boolean checkFilePerms) throws WrappedResponse { + protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new Datasets.DsVersionHandler>() { @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkFilePerms); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); } @Override @@ -413,14 +411,13 @@ public Command handleDraft() { @Override public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkFilePerms); + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned, checkPermsWhenDeaccessioned); } @Override public Command handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned, checkFilePerms); + return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); } - })); return dsv; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index fd09e7b693e..1ebd6b487a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -428,26 +428,22 @@ public Response getVersion(@Context ContainerRequestContext crc, //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. - boolean checkFilePerms = excludeFiles == null ? true : !excludeFiles; + boolean checkPerms = excludeFiles == null ? 
true : !excludeFiles; Dataset dst = findDatasetOrDie(datasetId); DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, versionId, dst, uriInfo, headers, includeDeaccessioned, - checkFilePerms); + checkPerms); - DatasetVersion latestDatasetVersion = requestedDatasetVersion; - - //We need to retrieve the latest version to check the status as request of the SPA, we have to set the - //deaccesionedLookup to true to check to include deaccessioned datasets in the lookup - //checkFilePerms is set to false because we are not going to check the status of the latest version only - //if the user is requesting already the latest version don't need to check + DatasetVersion latestDatasetVersion = null; boolean deaccesionedLookup = true; - checkFilePerms = false; + //Check perms is false since we are never going to retrieve files, we are just getting the status of the version. + checkPerms = false; if(versionId != DS_VERSION_LATEST){ latestDatasetVersion = getDatasetVersionOrDie(req, DS_VERSION_LATEST, dst, uriInfo, headers, deaccesionedLookup, - checkFilePerms); + checkPerms); } else { latestDatasetVersion = requestedDatasetVersion; } @@ -2754,10 +2750,10 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String * Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error. */ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, - UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkFilePerms) + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { - DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkFilePerms); + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); if (dsv == null || dsv.getId() == null) { throw new WrappedResponse( @@ -2769,11 +2765,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String } return dsv; } - - - - - + @GET @Path("{identifier}/locks") public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 9f361c87702..2f22da7840c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -947,9 +947,8 @@ public Response getFileCitationByVersion(@Context ContainerRequestContext crc, @ DataverseRequest req = createDataverseRequest(getRequestUser(crc)); final DataFile df = execCommand(new GetDataFileCommand(req, findDataFileOrDie(fileIdOrPersistentId))); Dataset ds = df.getOwner(); - boolean checkFilePerms = true; - boolean checkUserPerms = true; - DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkFilePerms); + + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, true); if (dsv == null) { return unauthorized(BundleUtil.getStringFromBundle("files.api.no.draftOrUnauth")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java index 
cd422c67497..7e32b19e576 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionCommand.java @@ -13,7 +13,7 @@ * * @author Naomi */ -@RequiredPermissions(Permission.ViewUnpublishedDataset) +@RequiredPermissions( Permission.ViewUnpublishedDataset ) public class GetDraftDatasetVersionCommand extends AbstractCommand{ private final Dataset ds; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionUnrestrictedCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionUnrestrictedCommand.java deleted file mode 100644 index d52a67fcfbc..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDraftDatasetVersionUnrestrictedCommand.java +++ /dev/null @@ -1,26 +0,0 @@ -package edu.harvard.iq.dataverse.engine.command.impl; - -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; -import edu.harvard.iq.dataverse.engine.command.CommandContext; -import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; - -@RequiredPermissions({}) -public class GetDraftDatasetVersionUnrestrictedCommand extends AbstractCommand{ - - private final Dataset ds; - - public GetDraftDatasetVersionUnrestrictedCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { - super(aRequest, anAffectedDataset); - ds = anAffectedDataset; - } - - @Override - public DatasetVersion execute(CommandContext ctxt) throws CommandException { - return ds.getOrCreateEditVersion(); - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index b8dfa5a97ad..7bcc851bde2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -25,32 +25,24 @@ public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; - private boolean checkFilePerms; + private boolean checkPerms; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { this(aRequest, anAffectedDataset, false, false); } - public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, - boolean includeDeaccessioned, boolean checkFilePerms) { - + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPerms) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; this.includeDeaccessioned = includeDeaccessioned; - this.checkFilePerms = checkFilePerms; + this.checkPerms = checkPerms; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - - DatasetVersion latestAccessibleDatasetVersion = null; - - if(ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)){ - latestAccessibleDatasetVersion = 
ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); - } else { - latestAccessibleDatasetVersion = ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand( - getRequest(), ds, includeDeaccessioned, checkFilePerms)); + if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { + return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - return latestAccessibleDatasetVersion; + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPerms)); } } From 3417751a376a04a84432b92e5dfd579e51e06344 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 6 Mar 2024 10:27:31 -0500 Subject: [PATCH 0799/1112] flyway script bump --- ...id-providers.sql => V6.1.0.5__3623-multiple-pid-providers.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.4__3623-multiple-pid-providers.sql => V6.1.0.5__3623-multiple-pid-providers.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.4__3623-multiple-pid-providers.sql b/src/main/resources/db/migration/V6.1.0.5__3623-multiple-pid-providers.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.4__3623-multiple-pid-providers.sql rename to src/main/resources/db/migration/V6.1.0.5__3623-multiple-pid-providers.sql From 38d54bc8f3147e83b254ae72f7cbeaab219ee9c6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 6 Mar 2024 11:23:00 -0600 Subject: [PATCH 0800/1112] no-op update to pom.xml to force Docker image build As described in .github/workflows/maven_unit_test.yml we need to touch Java files or pom.xml for this action (which pushes Docker images) to run. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index aaa2b49eaae..8b2850e1df9 100644 --- a/pom.xml +++ b/pom.xml @@ -57,7 +57,7 @@ In case the dependency is both transitive and direct (e. g. some common lib for logging), manage the version above and add the direct dependency here WITHOUT version tag, too. 
--> - + + + redis.clients + jedis + 5.1.0 + + org.junit.jupiter diff --git a/scripts/installer/default.config b/scripts/installer/default.config index 8647cd02416..2a29a1d5270 100644 --- a/scripts/installer/default.config +++ b/scripts/installer/default.config @@ -32,3 +32,9 @@ DOI_USERNAME = dataciteuser DOI_PASSWORD = datacitepassword DOI_BASEURL = https://mds.test.datacite.org DOI_DATACITERESTAPIURL = https://api.test.datacite.org + +[redis] +REDIS_HOST = redis +REDIS_PORT = 6379 +REDIS_USER = default +REDIS_PASSWORD = redis_secret diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 99316efb83b..6d6003607bd 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -100,7 +100,8 @@ "database", "rserve", "system", - "doi"] + "doi", + "redis"] # read pre-defined defaults: diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py index 698f5ba9a58..faa5bf42341 100644 --- a/scripts/installer/installAppServer.py +++ b/scripts/installer/installAppServer.py @@ -30,6 +30,11 @@ def runAsadminScript(config): os.environ['DOI_PASSWORD'] = config.get('doi','DOI_PASSWORD') os.environ['DOI_DATACITERESTAPIURL'] = config.get('doi','DOI_DATACITERESTAPIURL') + os.environ['REDIS_HOST'] = config.get('redis','REDIS_HOST') + os.environ['REDIS_PORT'] = config.get('redis','REDIS_PORT') + os.environ['REDIS_USER'] = config.get('redis','REDIS_USER') + os.environ['REDIS_PASS'] = config.get('redis','REDIS_PASSWORD') + mailServerEntry = config.get('system','MAIL_SERVER') try: diff --git a/scripts/installer/interactive.config b/scripts/installer/interactive.config index ef8110c554f..9e0fafaa8b4 100644 --- a/scripts/installer/interactive.config +++ b/scripts/installer/interactive.config @@ -24,6 +24,10 @@ DOI_USERNAME = Datacite username DOI_PASSWORD = Datacite password DOI_BASEURL = Datacite URL DOI_DATACITERESTAPIURL = Datacite REST API URL +REDIS_HOST = Redis Server +REDIS_PORT = Redis Server Port +REDIS_USER = Redis User Name +REDIS_PASSWORD = Redis User Password [comments] HOST_DNS_ADDRESS = :(enter numeric IP address, if FQDN is unavailable) GLASSFISH_USER = :This user will be running the App. Server (Payara) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n: @@ -46,3 +50,7 @@ DOI_USERNAME = DataCite or EZID username. Only necessary for publishing / mintin DOI_PASSWORD = DataCite or EZID account password. DOI_BASEURL = DataCite or EZID URL. Probably https://mds.datacite.org DOI_DATACITERESTAPIURL = DataCite REST API URL (Make Data Count, /pids API). 
Probably https://api.datacite.org +REDIS_HOST = +REDIS_PORT = +REDIS_USER = +REDIS_PASSWORD = diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 3793b6eeeb4..8636172b731 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; +import edu.harvard.iq.dataverse.cache.CacheFactoryBean; import edu.harvard.iq.dataverse.engine.DataverseEngine; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; @@ -16,6 +17,7 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.engine.command.exception.RateLimitCommandException; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; @@ -176,7 +178,9 @@ public class EjbDataverseEngine { @EJB EjbDataverseEngineInner innerEngine; - + + @EJB + CacheFactoryBean cacheFactory; @Resource EJBContext ejbCtxt; @@ -202,7 +206,11 @@ public R submit(Command aCommand) throws CommandException { try { logRec.setUserIdentifier( aCommand.getRequest().getUser().getIdentifier() ); - + // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. 
+ if (!cacheFactory.checkRate(aCommand.getRequest().getUser(), aCommand.getClass().getSimpleName())) { + throw new RateLimitCommandException(BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); + } + // Check permissions - or throw an exception Map> requiredMap = aCommand.getRequiredPermissions(); if (requiredMap == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index 93892376edc..50680b67cee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -147,6 +147,8 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues, user.setMutedEmails(Type.tokenizeToSet((String) dbRowValues[15])); user.setMutedNotifications(Type.tokenizeToSet((String) dbRowValues[15])); + user.setRateLimitTier(Integer.valueOf((int)dbRowValues[16])); + user.setRoles(roles); return user; } @@ -419,7 +421,7 @@ private List getUserListCore(String searchTerm, qstr += " u.createdtime, u.lastlogintime, u.lastapiusetime, "; qstr += " prov.id, prov.factoryalias, "; qstr += " u.deactivated, u.deactivatedtime, "; - qstr += " u.mutedEmails, u.mutedNotifications "; + qstr += " u.mutedEmails, u.mutedNotifications, u.rateLimitTier "; qstr += " FROM authenticateduser u,"; qstr += " authenticateduserlookup prov_lookup,"; qstr += " authenticationproviderrow prov"; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 60e0b79662b..44629d5dd76 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -20,6 +20,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.exception.RateLimitCommandException; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean; @@ -421,7 +422,7 @@ public Command handleLatestPublished() { })); return dsv; } - + protected DataFile findDataFileOrDie(String id) throws WrappedResponse { DataFile datafile; if (id.equals(PERSISTENT_ID_KEY)) { @@ -575,6 +576,8 @@ protected T execCommand( Command cmd ) throws WrappedResponse { try { return engineSvc.submit(cmd); + } catch (RateLimitCommandException ex) { + throw new WrappedResponse(rateLimited(ex.getMessage())); } catch (IllegalCommandException ex) { //for 8859 for api calls that try to update datasets with TOA out of compliance if (ex.getMessage().toLowerCase().contains("terms of use")){ @@ -776,11 +779,12 @@ protected Response notFound( String msg ) { protected Response badRequest( String msg ) { return error( Status.BAD_REQUEST, msg ); } - + protected Response forbidden( String msg ) { return error( Status.FORBIDDEN, msg ); } - + protected Response rateLimited( String msg ) { return error( Status.TOO_MANY_REQUESTS, msg ); } + protected Response conflict( String msg ) { return error( Status.CONFLICT, msg ); } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index b307c655798..ff884926a1f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -146,6 +146,9 @@ public class AuthenticatedUser implements User, Serializable { @Transient private Set mutedNotificationsSet = new HashSet<>(); + @Column(nullable=true) + private Integer rateLimitTier; + @PrePersist void prePersist() { mutedNotifications = Type.toStringValue(mutedNotificationsSet); @@ -397,6 +400,13 @@ public void setDeactivatedTime(Timestamp deactivatedTime) { this.deactivatedTime = deactivatedTime; } + public Integer getRateLimitTier() { + return rateLimitTier; + } + public void setRateLimitTier(Integer rateLimitTier) { + this.rateLimitTier = rateLimitTier; + } + @OneToOne(mappedBy = "authenticatedUser") private AuthenticatedUserLookup authenticatedUserLookup; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java new file mode 100644 index 00000000000..83ba7a418e4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -0,0 +1,60 @@ +package edu.harvard.iq.dataverse.cache; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.JedisPoolConfig; + +import java.util.logging.Logger; + +@Stateless +@Named +public class CacheFactoryBean implements java.io.Serializable { + private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); + private static JedisPool jedisPool = null; + @EJB + SystemConfig systemConfig; + + @PostConstruct + public void init() { + logger.info("CacheFactoryBean.init Redis Host:Port " + systemConfig.getRedisBaseHost() + ":" + systemConfig.getRedisBasePort()); + jedisPool = new JedisPool(new JedisPoolConfig(), systemConfig.getRedisBaseHost(), Integer.valueOf(systemConfig.getRedisBasePort()), + systemConfig.getRedisUser(), systemConfig.getRedisPassword()); + } + @Override + protected void finalize() throws Throwable { + if (jedisPool != null) { + jedisPool.close(); + } + super.finalize(); + } + + /** + * Check if user can make this call or if they are rate limited + * @param user + * @param action + * @return true if user is superuser or rate not limited + */ + public boolean checkRate(User user, String action) { + if (user != null && user.isSuperuser()) { + return true; + }; + StringBuffer id = new StringBuffer(); + id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); + if (action != null) { + id.append(":").append(action); + } + + // get the capacity, i.e. calls per hour, from config + int capacity = (user instanceof AuthenticatedUser) ? 
+ RateLimitUtil.getCapacityByTier(systemConfig, ((AuthenticatedUser) user).getRateLimitTier()) : + RateLimitUtil.getCapacityByTier(systemConfig, 0); + return (!RateLimitUtil.rateLimited(jedisPool, id.toString(), capacity)); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java new file mode 100644 index 00000000000..14a4439bb56 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java @@ -0,0 +1,53 @@ +package edu.harvard.iq.dataverse.cache; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.ArrayList; +import java.util.List; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class RateLimitSetting { + + @JsonProperty("tier") + private int tier; + @JsonProperty("limitPerHour") + private int limitPerHour = RateLimitUtil.NO_LIMIT; + @JsonProperty("actions") + private List rateLimitActions = new ArrayList<>(); + + private int defaultLimitPerHour; + + public RateLimitSetting() {} + + @JsonProperty("tier") + public void setTier(int tier) { + this.tier = tier; + } + @JsonProperty("tier") + public int getTier() { + return this.tier; + } + @JsonProperty("limitPerHour") + public void setLimitPerHour(int limitPerHour) { + this.limitPerHour = limitPerHour; + } + @JsonProperty("limitPerHour") + public int getLimitPerHour() { + return this.limitPerHour; + } + @JsonProperty("actions") + public void setRateLimitActions(List rateLimitActions) { + this.rateLimitActions = rateLimitActions; + } + @JsonProperty("actions") + public List getRateLimitActions() { + return this.rateLimitActions; + } + public void setDefaultLimit(int defaultLimitPerHour) { + this.defaultLimitPerHour = defaultLimitPerHour; + } + public int getDefaultLimitPerHour() { + return this.defaultLimitPerHour; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java new file mode 100644 index 00000000000..b97773e0312 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -0,0 +1,124 @@ +package edu.harvard.iq.dataverse.cache; + +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; + +import java.io.StringReader; +import java.util.*; +import java.util.logging.Logger; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +public class RateLimitUtil { + private static final Logger logger = Logger.getLogger(RateLimitUtil.class.getCanonicalName()); + protected static final List rateLimits = new ArrayList<>(); + protected static final Map rateLimitMap = new HashMap<>(); + public static final int NO_LIMIT = -1; + + public static int getCapacityByTier(SystemConfig systemConfig, Integer tier) { + return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); + } + + public static boolean rateLimited(final JedisPool jedisPool, final String key, int capacityPerHour) { + if (capacityPerHour == NO_LIMIT) { + return false; + } + Jedis jedis; + try { + jedis = jedisPool.getResource(); + } catch (Exception e) { + // We can't rate limit if 
Redis is not reachable + logger.severe("RateLimitUtil.rateLimited jedisPool.getResource() " + e.getMessage()); + return false; + } + + long currentTime = System.currentTimeMillis() / 60000L; // convert to minutes + int tokensPerMinute = (int)Math.ceil(capacityPerHour / 60.0); + + // Get the last time this bucket was added to + final String keyLastUpdate = String.format("%s:last_update",key); + long lastUpdate = longFromKey(jedis, keyLastUpdate); + long deltaTime = currentTime - lastUpdate; + // Get the current number of tokens in the bucket + long tokens = longFromKey(jedis, key); + long tokensToAdd = (long) (deltaTime * tokensPerMinute); + + if (tokensToAdd > 0) { // Don't update timestamp if we aren't adding any tokens to the bucket + tokens = min(capacityPerHour, tokens + tokensToAdd); + jedis.set(keyLastUpdate, String.valueOf(currentTime)); + } + + // Update with any added tokens and decrement 1 token for this call if not rate limited (0 tokens) + jedis.set(key, String.valueOf(max(0, tokens-1))); + jedisPool.returnResource(jedis); + return tokens < 1; + } + + public static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, String action) { + if (rateLimits.isEmpty()) { + init(systemConfig); + } + + return rateLimitMap.containsKey(getMapKey(tier,action)) ? rateLimitMap.get(getMapKey(tier,action)) : + rateLimitMap.containsKey(getMapKey(tier)) ? rateLimitMap.get(getMapKey(tier)) : + getCapacityByTier(systemConfig, tier); + } + + private static void init(SystemConfig systemConfig) { + getRateLimitsFromJson(systemConfig); + /* Convert the List of Rate Limit Settings containing a list of Actions to a fast lookup Map where the key is: + for default if no action defined: "{tier}:" and the value is the default limit for the tier + for each action: "{tier}:{action}" and the value is the limit defined in the setting + */ + rateLimits.forEach(r -> { + r.setDefaultLimit(getCapacityByTier(systemConfig, r.getTier())); + rateLimitMap.put(getMapKey(r.getTier()), r.getDefaultLimitPerHour()); + r.getRateLimitActions().forEach(a -> rateLimitMap.put(getMapKey(r.getTier(), a), r.getLimitPerHour())); + }); + } + + private static void getRateLimitsFromJson(SystemConfig systemConfig) { + ObjectMapper mapper = new ObjectMapper(); + String setting = systemConfig.getRateLimitsJson(); + if (!setting.isEmpty()) { + try { + JsonReader jr = Json.createReader(new StringReader(setting)); + JsonObject obj= jr.readObject(); + JsonArray lst = obj.getJsonArray("rateLimits"); + + rateLimits.addAll(mapper.readValue(lst.toString(), + mapper.getTypeFactory().constructCollectionType(List.class, RateLimitSetting.class))); + } catch (Exception e) { + logger.warning("Unable to parse Rate Limit Json" + ": " + e.getLocalizedMessage()); + rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization + e.printStackTrace(); + } + } + } + + private static String getMapKey(Integer tier) { + return getMapKey(tier, null); + } + + private static String getMapKey(Integer tier, String action) { + StringBuffer key = new StringBuffer(); + key.append(tier).append(":"); + if (action != null) { + key.append(action); + } + return key.toString(); + } + + private static long longFromKey(Jedis r, String key) { + String l = r.get(key); + return l != null ? 
Long.parseLong(l) : 0L; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/RateLimitCommandException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/RateLimitCommandException.java new file mode 100644 index 00000000000..99a665b31ac --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/RateLimitCommandException.java @@ -0,0 +1,16 @@ +package edu.harvard.iq.dataverse.engine.command.exception; + +import edu.harvard.iq.dataverse.engine.command.Command; + +/** + * An exception raised when a command cannot be executed, due to the + * issuing user being rate limited. + * + * @author + */ +public class RateLimitCommandException extends CommandException { + + public RateLimitCommandException(String message, Command aCommand) { + super(message, aCommand); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index b05c88c0be2..2d1667f0cc5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -239,6 +239,10 @@ public enum Key { CVocConf, + // Default calls per hour for each tier. csv format (30,60,...) + RateLimitingDefaultCapacityTiers, + // json defined list of capacities by tier and action list. See RateLimitSetting.java + RateLimitingCapacityByTierAndAction, /** * A link to an installation of https://github.com/IQSS/miniverse or * some other metrics app. diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 3f1ec3dd7eb..0f4537ddb99 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1146,11 +1146,50 @@ public Long getTestStorageQuotaLimit() { return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes); } /** - * Should we store tab-delimited files produced during ingest *with* the - * variable name header line included? + * Should we store tab-delimited files produced during ingest *with* the + * variable name header line included? * @return boolean - defaults to false. */ public boolean isStoringIngestedFilesWithHeaders() { return settingsService.isTrueForKey(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders, false); } + + /* + RateLimitUtil will parse the json to create a List + */ + public String getRateLimitsJson() { + return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingCapacityByTierAndAction, ""); + } + + public Integer getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settingKey, final Integer index, final Integer defaultValue) { + Integer value = defaultValue; + if (settingKey != null && !settingKey.equals("")) { + String csv = settingsService.getValueForKey(settingKey, ""); + try { + int[] values = Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); + value = index > values.length ? 
defaultValue : Integer.valueOf(values[index]); + } catch (NumberFormatException nfe) { + logger.warning(nfe.getMessage()); + } + } + + return value; + } + + public String getRedisBaseHost() { + String saneDefault = "redis"; + return System.getProperty("DATAVERSE_REDIS_HOST",saneDefault); + } + public String getRedisBasePort() { + String saneDefault = "6379"; + return System.getProperty("DATAVERSE_REDIS_PORT",saneDefault); + } + public String getRedisUser() { + String saneDefault = "default"; + return System.getProperty("DATAVERSE_REDIS_USER",saneDefault); + } + public String getRedisPassword() { + String saneDefault = "redis_secret"; + return System.getProperty("DATAVERSE_REDIS_PASSWORD",saneDefault); + } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 17dd0933f55..1b9ffd53e55 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2629,6 +2629,7 @@ pid.allowedCharacters=^[A-Za-z0-9._/:\\-]* command.exception.only.superusers={1} can only be called by superusers. command.exception.user.deactivated={0} failed: User account has been deactivated. command.exception.user.deleted={0} failed: User account has been deleted. +command.exception.user.ratelimited={0} failed: Rate limited due to too many requests. #Admin-API admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser. diff --git a/src/main/resources/db/migration/V6.1.0.1__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.1__9356-add-rate-limiting.sql new file mode 100644 index 00000000000..ae30fd96bfd --- /dev/null +++ b/src/main/resources/db/migration/V6.1.0.1__9356-add-rate-limiting.sql @@ -0,0 +1 @@ +ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS ratelimittier int DEFAULT 1; \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java new file mode 100644 index 00000000000..fa27ea6d4fd --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -0,0 +1,114 @@ +package edu.harvard.iq.dataverse.cache; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.SystemConfig; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.lenient; + +@ExtendWith(MockitoExtension.class) +public class CacheFactoryBeanTest { + + @Mock + SystemConfig systemConfig; + @InjectMocks + CacheFactoryBean cache = new CacheFactoryBean(); + AuthenticatedUser authUser = new AuthenticatedUser(); + String action; + + @BeforeEach + public void setup() { + lenient().doReturn("localhost").when(systemConfig).getRedisBaseHost(); + lenient().doReturn("6379").when(systemConfig).getRedisBasePort(); + lenient().doReturn("default").when(systemConfig).getRedisUser(); + lenient().doReturn("redis_secret").when(systemConfig).getRedisPassword(); + lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); + 
lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); + lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + + cache.init(); + authUser.setRateLimitTier(1); // reset to default + action = "cmd-" + UUID.randomUUID(); + } + @Test + public void testGuestUserGettingRateLimited() throws InterruptedException { + User user = GuestUser.get(); + boolean rateLimited = false; + int cnt = 0; + for (; cnt <100; cnt++) { + rateLimited = !cache.checkRate(user, action); + if (rateLimited) { + break; + } + } + assertTrue(rateLimited && cnt > 1 && cnt <= 30); + } + + @Test + public void testAdminUserExemptFromGettingRateLimited() throws InterruptedException { + authUser.setSuperuser(true); + authUser.setUserIdentifier("admin"); + + boolean rateLimited = false; + int cnt = 0; + for (; cnt <100; cnt++) { + rateLimited = !cache.checkRate(authUser, action); + if (rateLimited) { + break; + } + } + assertTrue(!rateLimited && cnt >= 99); + } + + @Test + public void testAuthenticatedUserGettingRateLimited() throws InterruptedException { + authUser.setSuperuser(false); + authUser.setUserIdentifier("authUser"); + authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds + boolean limited = false; + int cnt; + for (cnt = 0; cnt <200; cnt++) { + limited = !cache.checkRate(authUser, action); + if (limited) { + break; + } + } + assertTrue(limited && cnt == 120); + + for (cnt = 0; cnt <60; cnt++) { + Thread.sleep(1000);// wait for bucket to be replenished (check each second for 1 minute max) + limited = !cache.checkRate(authUser, action); + if (!limited) { + break; + } + } + assertTrue(!limited && cnt > 15, "cnt:" + cnt); + } + + @Test + public void testAuthenticatedUserWithRateLimitingOff() throws InterruptedException { + lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); + authUser.setSuperuser(false); + authUser.setUserIdentifier("user1"); + boolean rateLimited = false; + int cnt = 0; + for (; cnt <100; cnt++) { + rateLimited = !cache.checkRate(authUser, action); + if (rateLimited) { + break; + } + } + assertTrue(!rateLimited && cnt > 99); + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java new file mode 100644 index 00000000000..d51fe7471e3 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java @@ -0,0 +1,95 @@ +package edu.harvard.iq.dataverse.cache; + +import edu.harvard.iq.dataverse.util.SystemConfig; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.lenient; + +@ExtendWith(MockitoExtension.class) +public class RateLimitUtilTest { + + @Mock + SystemConfig systemConfig; + + static final String settingJson = "{\n" + + " \"rateLimits\":[\n" + + " {\n" + + " \"tier\": 0,\n" + + " \"limitPerHour\": 10,\n" + + " \"actions\": [\n" + + " \"GetLatestPublishedDatasetVersionCommand\",\n" + + " \"GetPrivateUrlCommand\",\n" + + " \"GetDatasetCommand\",\n" + + " \"GetLatestAccessibleDatasetVersionCommand\"\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"tier\": 0,\n" + + " 
\"limitPerHour\": 1,\n" + + " \"actions\": [\n" + + " \"CreateGuestbookResponseCommand\",\n" + + " \"UpdateDatasetVersionCommand\",\n" + + " \"DestroyDatasetCommand\",\n" + + " \"DeleteDataFileCommand\",\n" + + " \"FinalizeDatasetPublicationCommand\",\n" + + " \"PublishDatasetCommand\"\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"tier\": 1,\n" + + " \"limitPerHour\": 30,\n" + + " \"actions\": [\n" + + " \"CreateGuestbookResponseCommand\",\n" + + " \"GetLatestPublishedDatasetVersionCommand\",\n" + + " \"GetPrivateUrlCommand\",\n" + + " \"GetDatasetCommand\",\n" + + " \"GetLatestAccessibleDatasetVersionCommand\",\n" + + " \"UpdateDatasetVersionCommand\",\n" + + " \"DestroyDatasetCommand\",\n" + + " \"DeleteDataFileCommand\",\n" + + " \"FinalizeDatasetPublicationCommand\",\n" + + " \"PublishDatasetCommand\"\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}"; + static final String settingJsonBad = "{\n"; + + @BeforeEach + public void setup() { + lenient().doReturn(100).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); + lenient().doReturn(200).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); + lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + RateLimitUtil.rateLimitMap.clear(); + RateLimitUtil.rateLimits.clear(); + } + @Test + public void testConfig() { + lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); + assertEquals(100, RateLimitUtil.getCapacityByTier(systemConfig, 0)); + assertEquals(200, RateLimitUtil.getCapacityByTier(systemConfig, 1)); + assertEquals(1, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "DestroyDatasetCommand")); + assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "Default Limit")); + + assertEquals(30, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "Default Limit")); + + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 2, "Default No Limit")); + } + @Test + public void testBadJson() { + lenient().doReturn(settingJsonBad).when(systemConfig).getRateLimitsJson(); + assertEquals(100, RateLimitUtil.getCapacityByTier(systemConfig, 0)); + assertEquals(200, RateLimitUtil.getCapacityByTier(systemConfig, 1)); + assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 2, "GetLatestAccessibleDatasetVersionCommand")); + } +} From c657eb0a6d3424b12375f46d0245d4940af016fe Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 5 Jan 2024 11:12:28 -0500 Subject: [PATCH 0828/1112] fixing tests --- .../java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 44629d5dd76..b7305a24f69 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -783,7 +783,10 @@ protected Response badRequest( String msg ) { protected Response forbidden( String msg ) { return error( Status.FORBIDDEN, msg ); 
} - protected Response rateLimited( String msg ) { return error( Status.TOO_MANY_REQUESTS, msg ); } + + protected Response rateLimited( String msg ) { + return error( Status.TOO_MANY_REQUESTS, msg ); + } protected Response conflict( String msg ) { return error( Status.CONFLICT, msg ); From f5e00706cd400619845fff4ef2e1f992e69fe56f Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 5 Jan 2024 14:01:44 -0500 Subject: [PATCH 0829/1112] fixing tests --- pom.xml | 6 +++++ .../dataverse/cache/CacheFactoryBeanTest.java | 25 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index de7e12cbfa6..7ae274bc42e 100644 --- a/pom.xml +++ b/pom.xml @@ -660,6 +660,12 @@ 3.9.0 test + + ai.grakn + redis-mock + 0.1.3 + test + diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index fa27ea6d4fd..eabc9cd4c2c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -1,9 +1,12 @@ package edu.harvard.iq.dataverse.cache; +import ai.grakn.redismock.RedisServer; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -11,6 +14,7 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import java.io.IOException; import java.util.UUID; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -26,13 +30,14 @@ public class CacheFactoryBeanTest { CacheFactoryBean cache = new CacheFactoryBean(); AuthenticatedUser authUser = new AuthenticatedUser(); String action; + static RedisServer mockRedisServer; @BeforeEach - public void setup() { - lenient().doReturn("localhost").when(systemConfig).getRedisBaseHost(); - lenient().doReturn("6379").when(systemConfig).getRedisBasePort(); - lenient().doReturn("default").when(systemConfig).getRedisUser(); - lenient().doReturn("redis_secret").when(systemConfig).getRedisPassword(); + public void setup() throws IOException { + lenient().doReturn(mockRedisServer.getHost()).when(systemConfig).getRedisBaseHost(); + lenient().doReturn(String.valueOf(mockRedisServer.getBindPort())).when(systemConfig).getRedisBasePort(); + lenient().doReturn(null).when(systemConfig).getRedisUser(); + lenient().doReturn(null).when(systemConfig).getRedisPassword(); lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); @@ -41,6 +46,16 @@ public void setup() { authUser.setRateLimitTier(1); // reset to default action = "cmd-" + UUID.randomUUID(); } + @BeforeAll + public static void init() throws IOException { + mockRedisServer = RedisServer.newRedisServer(); + mockRedisServer.start(); + } + @AfterAll + public static void cleanup() { + if (mockRedisServer != null) + mockRedisServer.stop(); + } @Test public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); From 
1b0a55496bb0570326e2eb5b340a566325282b72 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 5 Jan 2024 14:05:21 -0500 Subject: [PATCH 0830/1112] fixing tests --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index eabc9cd4c2c..f2d14afc488 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -108,7 +108,7 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio break; } } - assertTrue(!limited && cnt > 15, "cnt:" + cnt); + assertTrue(!limited); } @Test From a53462736e95859b464e62985ac77d7c951d4700 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 9 Jan 2024 11:26:31 -0500 Subject: [PATCH 0831/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Philip Durbin --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 6b40ed7498c..970a5fc8218 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,4 +1,4 @@ -Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Administrator/Superuser accounts are exempt from rate limiting. +Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. RateLimitingDefaultCapacityTiers is a comma separated list of default values for each tier. In the following example, the default for tier 0 (guest users) is set to 10,000 calls per command per hour and tier 1 (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to -1 (No Limit). From c80f74aceb502b68fdb681b177f31c478d1c1a0c Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Jan 2024 11:48:10 -0500 Subject: [PATCH 0832/1112] fixing review comments --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 0f4537ddb99..dc9dbab097c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1166,7 +1166,7 @@ public Integer getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settin if (settingKey != null && !settingKey.equals("")) { String csv = settingsService.getValueForKey(settingKey, ""); try { - int[] values = Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); + int[] values = csv.isEmpty() ? 
new int[0] : Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); value = index > values.length ? defaultValue : Integer.valueOf(values[index]); } catch (NumberFormatException nfe) { logger.warning(nfe.getMessage()); From 12c15b58776b897fd398a70a6144ef2b343dcf0b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Jan 2024 17:31:24 -0500 Subject: [PATCH 0833/1112] review comment fixes --- doc/release-notes/9356-rate-limiting.md | 9 ++- .../source/installation/config.rst | 71 +++++++++++++++++++ ...l => V6.1.0.2__9356-add-rate-limiting.sql} | 0 3 files changed, 77 insertions(+), 3 deletions(-) rename src/main/resources/db/migration/{V6.1.0.1__9356-add-rate-limiting.sql => V6.1.0.2__9356-add-rate-limiting.sql} (100%) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 970a5fc8218..c89a87f83bd 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,6 +1,9 @@ -Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. +Rate Limiting using Redis Server +The option to Rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. +Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. + RateLimitingDefaultCapacityTiers is a comma separated list of default values for each tier. In the following example, the default for tier 0 (guest users) is set to 10,000 calls per command per hour and tier 1 (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to -1 (No Limit). curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' @@ -8,5 +11,5 @@ RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' -Rate Limiting cache is handled by a Redis server. The following system setting are used to configure access to the server: -DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_POST; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. \ No newline at end of file +Rate Limiting cache is handled by a Redis server. The following environment variables are used to configure access to the server: +DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. \ No newline at end of file diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2baa2827250..79df6e76b28 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1373,6 +1373,77 @@ Before being moved there, on your machine, large file uploads via API will cause RAM and/or swap usage bursts. You might want to point this to a different location, restrict maximum size of it, and monitor for stale uploads. +.. _redis-cache-rate-limiting: + +Configure Your Dataverse Installation to use Redis for rate limiting +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. +Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. +Superuser accounts are exempt from rate limiting. +Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. +Two database settings configure the rate limiting. +Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. + +- RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... + A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. + ``curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'`` + +- RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. 
+ In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. + curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' + +.. code-block:: json + { + "rateLimits": [ + { + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" + ] + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ] + } + +- Redis server configuration is handled through environment variables. The following environment variables are used to configure access to the server: + DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. + Defaults for docker testing: + DATAVERSE_REDIS_HOST: "redis" + DATAVERSE_REDIS_PORT: "6379" + DATAVERSE_REDIS_USER: "default" + DATAVERSE_REDIS_PASSWORD: "redis_secret" .. 
_Branding Your Installation: diff --git a/src/main/resources/db/migration/V6.1.0.1__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.1__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql From a178929fb1964be56d9e27d83ed1663f246bb100 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Jan 2024 17:38:23 -0500 Subject: [PATCH 0834/1112] review comment fixes --- .../source/installation/config.rst | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 79df6e76b28..46265160ed6 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1393,50 +1393,6 @@ Note: If either of these settings exist in the database rate limiting will be en In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' -.. code-block:: json - { - "rateLimits": [ - { - "tier": 0, - "limitPerHour": 10, - "actions": [ - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand" - ] - }, - { - "tier": 0, - "limitPerHour": 1, - "actions": [ - "CreateGuestbookResponseCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - }, - { - "tier": 1, - "limitPerHour": 30, - "actions": [ - "CreateGuestbookResponseCommand", - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - } - ] - } - - Redis server configuration is handled through environment variables. The following environment variables are used to configure access to the server: DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. 
Defaults for docker testing: From 4684384ed67bd60352bde3c359230fd96d8c4123 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Jan 2024 17:41:43 -0500 Subject: [PATCH 0835/1112] review comment fixes --- doc/sphinx-guides/source/installation/config.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 46265160ed6..130af770a46 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1396,10 +1396,10 @@ Note: If either of these settings exist in the database rate limiting will be en - Redis server configuration is handled through environment variables. The following environment variables are used to configure access to the server: DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. Defaults for docker testing: - DATAVERSE_REDIS_HOST: "redis" - DATAVERSE_REDIS_PORT: "6379" - DATAVERSE_REDIS_USER: "default" - DATAVERSE_REDIS_PASSWORD: "redis_secret" + DATAVERSE_REDIS_HOST: "redis" + DATAVERSE_REDIS_PORT: "6379" + DATAVERSE_REDIS_USER: "default" + DATAVERSE_REDIS_PASSWORD: "redis_secret" .. _Branding Your Installation: From 1e44206bb8c8edc46c874b815bb3074bb588b142 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 11 Jan 2024 11:55:10 -0500 Subject: [PATCH 0836/1112] fixes to get DatasetsIT to pass --- .../edu/harvard/iq/dataverse/UserServiceBean.java | 2 +- .../authorization/users/AuthenticatedUser.java | 8 ++++---- .../harvard/iq/dataverse/cache/RateLimitUtil.java | 2 +- .../edu/harvard/iq/dataverse/util/SystemConfig.java | 13 ++++++++----- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index 50680b67cee..47aebb78a35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -147,7 +147,7 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues, user.setMutedEmails(Type.tokenizeToSet((String) dbRowValues[15])); user.setMutedNotifications(Type.tokenizeToSet((String) dbRowValues[15])); - user.setRateLimitTier(Integer.valueOf((int)dbRowValues[16])); + user.setRateLimitTier((int)dbRowValues[16]); user.setRoles(roles); return user; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index ff884926a1f..0ed036afc6b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -146,8 +146,8 @@ public class AuthenticatedUser implements User, Serializable { @Transient private Set mutedNotificationsSet = new HashSet<>(); - @Column(nullable=true) - private Integer rateLimitTier; + @Column + private int rateLimitTier; @PrePersist void prePersist() { @@ -400,10 +400,10 @@ public void setDeactivatedTime(Timestamp deactivatedTime) { this.deactivatedTime = deactivatedTime; } - public Integer getRateLimitTier() { + public int getRateLimitTier() { return rateLimitTier; } - public void setRateLimitTier(Integer rateLimitTier) { + public void setRateLimitTier(int rateLimitTier) { this.rateLimitTier = rateLimitTier; } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index b97773e0312..afc0b323da0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -23,7 +23,7 @@ public class RateLimitUtil { protected static final Map rateLimitMap = new HashMap<>(); public static final int NO_LIMIT = -1; - public static int getCapacityByTier(SystemConfig systemConfig, Integer tier) { + public static int getCapacityByTier(SystemConfig systemConfig, int tier) { return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index dc9dbab097c..37eec5a1e80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1161,18 +1161,21 @@ public String getRateLimitsJson() { return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingCapacityByTierAndAction, ""); } - public Integer getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settingKey, final Integer index, final Integer defaultValue) { - Integer value = defaultValue; + public int getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settingKey, int index, int defaultValue) { + int value = defaultValue; if (settingKey != null && !settingKey.equals("")) { String csv = settingsService.getValueForKey(settingKey, ""); try { - int[] values = csv.isEmpty() ? new int[0] : Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); - value = index > values.length ? 
defaultValue : Integer.valueOf(values[index]); + if (!csv.isEmpty()) { + int[] values = Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); + if (index < values.length) { + value = values[index]; + } + } } catch (NumberFormatException nfe) { logger.warning(nfe.getMessage()); } } - return value; } From 77074cc45de9e5ec8e5cdb3de404ab312bb358a7 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 18 Jan 2024 11:32:31 -0500 Subject: [PATCH 0837/1112] fix mock for redis tests --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index f2d14afc488..579da3f97a7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -41,6 +41,7 @@ public void setup() throws IOException { lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + lenient().doReturn("").when(systemConfig).getRateLimitsJson(); cache.init(); authUser.setRateLimitTier(1); // reset to default From 4cdba95b80f3492c024e9ab649fd7f7c35a224e2 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 18 Jan 2024 12:54:08 -0500 Subject: [PATCH 0838/1112] fix mock for redis tests --- .../dataverse/cache/CacheFactoryBeanTest.java | 44 ++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 579da3f97a7..769b7ce5859 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -31,6 +31,48 @@ public class CacheFactoryBeanTest { AuthenticatedUser authUser = new AuthenticatedUser(); String action; static RedisServer mockRedisServer; + static final String settingJson = "{\n" + + " \"rateLimits\":[\n" + + " {\n" + + " \"tier\": 0,\n" + + " \"limitPerHour\": 10,\n" + + " \"actions\": [\n" + + " \"GetLatestPublishedDatasetVersionCommand\",\n" + + " \"GetPrivateUrlCommand\",\n" + + " \"GetDatasetCommand\",\n" + + " \"GetLatestAccessibleDatasetVersionCommand\"\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"tier\": 0,\n" + + " \"limitPerHour\": 1,\n" + + " \"actions\": [\n" + + " \"CreateGuestbookResponseCommand\",\n" + + " \"UpdateDatasetVersionCommand\",\n" + + " \"DestroyDatasetCommand\",\n" + + " \"DeleteDataFileCommand\",\n" + + " \"FinalizeDatasetPublicationCommand\",\n" + + " \"PublishDatasetCommand\"\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"tier\": 1,\n" + + " \"limitPerHour\": 30,\n" + + " \"actions\": [\n" + + " \"CreateGuestbookResponseCommand\",\n" + + " \"GetLatestPublishedDatasetVersionCommand\",\n" + + " \"GetPrivateUrlCommand\",\n" + + " \"GetDatasetCommand\",\n" + + " \"GetLatestAccessibleDatasetVersionCommand\",\n" + + " \"UpdateDatasetVersionCommand\",\n" + + " \"DestroyDatasetCommand\",\n" + + " \"DeleteDataFileCommand\",\n" + + " \"FinalizeDatasetPublicationCommand\",\n" + + " \"PublishDatasetCommand\"\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}"; @BeforeEach public void setup() throws IOException { @@ -41,7 
+83,7 @@ public void setup() throws IOException { lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); - lenient().doReturn("").when(systemConfig).getRateLimitsJson(); + lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); cache.init(); authUser.setRateLimitTier(1); // reset to default From 1b4f613c46fa1e51d9d50f32717fceb99979346a Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 09:19:49 -0500 Subject: [PATCH 0839/1112] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 130af770a46..ab22451a210 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1375,8 +1375,8 @@ Before being moved there, .. _redis-cache-rate-limiting: -Configure Your Dataverse Installation to use Redis for rate limiting -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Configure Your Dataverse Installation to use Redis for Rate Limiting +-------------------------------------------------------------------- Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. 
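
[Editor's note between patches 0839 and 0840] To orient readers before the lower-level token-bucket tweaks that follow, here is a minimal, illustrative sketch of how a caller might consult the rate-limit bean introduced in this series. The checkRate(User, String) signature and the CacheFactoryBean/User classes are taken from the diffs; the wrapper class, the EJB injection point, and the placeholder return strings are assumptions for illustration only, not code from the series:

    import edu.harvard.iq.dataverse.authorization.users.User;
    import edu.harvard.iq.dataverse.cache.CacheFactoryBean;
    import jakarta.ejb.EJB;

    // Illustrative only: guards an arbitrary command with the singleton cache bean from this series.
    public class RateLimitedEndpointSketch {

        @EJB
        CacheFactoryBean cacheFactory; // singleton rate-limit bean introduced in these patches

        public String runCommand(User user, String commandName) {
            // checkRate returns false when the user's token bucket for this action is exhausted
            if (!cacheFactory.checkRate(user, commandName)) {
                return "429 Too Many Requests"; // placeholder; real code would build a JAX-RS Response
            }
            return "command executed";
        }
    }
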
From 2b603a60db0595adb6f4fc6fcda270f15a9215eb Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 24 Jan 2024 09:18:33 -0500 Subject: [PATCH 0840/1112] fixes from comments --- src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java | 2 +- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index afc0b323da0..ee76342dc17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -41,7 +41,7 @@ public static boolean rateLimited(final JedisPool jedisPool, final String key, i } long currentTime = System.currentTimeMillis() / 60000L; // convert to minutes - int tokensPerMinute = (int)Math.ceil(capacityPerHour / 60.0); + double tokensPerMinute = (capacityPerHour / 60.0); // Get the last time this bucket was added to final String keyLastUpdate = String.format("%s:last_update",key); diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 769b7ce5859..d6be3dcf831 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -76,7 +76,7 @@ public class CacheFactoryBeanTest { @BeforeEach public void setup() throws IOException { - lenient().doReturn(mockRedisServer.getHost()).when(systemConfig).getRedisBaseHost(); + lenient().doReturn("127.0.0.1").when(systemConfig).getRedisBaseHost(); lenient().doReturn(String.valueOf(mockRedisServer.getBindPort())).when(systemConfig).getRedisBasePort(); lenient().doReturn(null).when(systemConfig).getRedisUser(); lenient().doReturn(null).when(systemConfig).getRedisPassword(); From 13e301148c1bf7fe392c3a422daee3438025144b Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:47:14 -0500 Subject: [PATCH 0841/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index c89a87f83bd..75c47adeb4d 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,4 +1,4 @@ -Rate Limiting using Redis Server +## Rate Limiting using Redis Cache The option to Rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. 
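
[Editor's note between patches 0841 and 0842] The tokensPerMinute change in patch 0840 is easier to see with concrete numbers. The sketch below mirrors the refill arithmetic of RateLimitUtil.rateLimited as it appears in this series (time bucketed to whole minutes, a refill of capacityPerHour / 60.0 tokens per elapsed minute, one token consumed per call, limited when the bucket is empty); the standalone class name and the plain in-memory map standing in for the shared cache are assumptions for illustration:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative stand-in for RateLimitUtil.rateLimited: a per-key token bucket
    // refilled at capacityPerHour / 60.0 tokens per elapsed minute.
    public class TokenBucketSketch {

        private final Map<String, String> cache = new HashMap<>(); // assumed in-memory stand-in

        public boolean rateLimited(String key, int capacityPerHour, long nowMillis) {
            if (capacityPerHour < 0) {
                return false; // a negative capacity means "no limit" in this series
            }
            long currentMinute = nowMillis / 60000L;
            double tokensPerMinute = capacityPerHour / 60.0;

            long lastUpdate = parse(cache.get(key + ":last_update"));
            long tokens = parse(cache.get(key));
            long tokensToAdd = (long) ((currentMinute - lastUpdate) * tokensPerMinute);
            if (tokensToAdd > 0) { // only touch the timestamp when tokens are actually added
                tokens = Math.min(capacityPerHour, tokens + tokensToAdd);
                cache.put(key + ":last_update", String.valueOf(currentMinute));
            }
            // store the refilled count minus one token for this call
            cache.put(key, String.valueOf(Math.max(0, tokens - 1)));
            return tokens < 1;
        }

        private static long parse(String s) {
            return s != null ? Long.parseLong(s) : 0L;
        }
    }

Switching tokensPerMinute from an int produced by Math.ceil to a plain double likely matters for small hourly capacities: with ceil, a limit of 10 per hour would refill a full token every minute and behave like 60 per hour, whereas the double keeps the refill proportional to the configured capacity.
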
From 0467f4c23176305c991c96286254fe00ae4f747e Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:47:47 -0500 Subject: [PATCH 0842/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 75c47adeb4d..028ff442520 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,5 +1,7 @@ ## Rate Limiting using Redis Cache -The option to Rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. +The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. +Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. +Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. From 5253de8d777be30fdb1975af75330194eb49feb3 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:48:09 -0500 Subject: [PATCH 0843/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 028ff442520..5732a72fbef 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -4,7 +4,8 @@ Rate limiting can be configured on a tier level with tier 0 being reserved for g Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. -Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. +Note: If either of these settings exist in the database rate limiting will be enabled. +If neither setting exists rate limiting is disabled. RateLimitingDefaultCapacityTiers is a comma separated list of default values for each tier. In the following example, the default for tier 0 (guest users) is set to 10,000 calls per command per hour and tier 1 (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to -1 (No Limit). 
curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' From 13fdd8837cf6c589df81250340c153407e7fc40c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:48:38 -0500 Subject: [PATCH 0844/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 5732a72fbef..d593bdacbbf 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -7,7 +7,9 @@ Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. -RateLimitingDefaultCapacityTiers is a comma separated list of default values for each tier. In the following example, the default for tier 0 (guest users) is set to 10,000 calls per command per hour and tier 1 (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to -1 (No Limit). +`RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. +In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. +Tiers not specified in this setting will default to `-1` (No Limit). curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. From dd30c7b96f00b8aec5eb8a82cedb1594db530852 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:48:57 -0500 Subject: [PATCH 0845/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index d593bdacbbf..6e060117db4 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -10,7 +10,7 @@ If neither setting exists rate limiting is disabled. `RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). -curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' +`curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. 
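
[Editor's note between patches 0844 and 0845] As a quick cross-check of the CSV-based tier setting documented above, the sketch below mirrors the lookup that getIntFromCSVStringOrDefault performs in this series (split the setting on commas, take the value at the tier index, fall back to the default when the index is out of range or the value is malformed); the standalone class, method name, and hard-coded setting string are assumptions for illustration, with the '10000,20000' value taken from the release-note example:

    import java.util.Arrays;

    // Illustrative parse of the :RateLimitingDefaultCapacityTiers CSV setting,
    // mirroring getIntFromCSVStringOrDefault from this patch series.
    public class TierCapacitySketch {

        static final int NO_LIMIT = -1;

        public static int capacityForTier(String csv, int tier) {
            if (csv == null || csv.isEmpty()) {
                return NO_LIMIT;
            }
            try {
                int[] values = Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray();
                return tier < values.length ? values[tier] : NO_LIMIT;
            } catch (NumberFormatException nfe) {
                return NO_LIMIT; // a malformed setting falls back to "no limit"
            }
        }

        public static void main(String[] args) {
            String setting = "10000,20000"; // value used in the release-note example
            System.out.println(capacityForTier(setting, 0)); // 10000 (guest users)
            System.out.println(capacityForTier(setting, 1)); // 20000 (authenticated users)
            System.out.println(capacityForTier(setting, 2)); // -1, no limit for unspecified tiers
        }
    }
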
In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. From 23606a066dd07b4e565bc8a49ca1e66389aeaa31 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:49:14 -0500 Subject: [PATCH 0846/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 6e060117db4..d6653de2d12 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -12,7 +12,8 @@ In the following example, the default for tier `0` (guest users) is set to 10,00 Tiers not specified in this setting will default to `-1` (No Limit). `curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` -RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. +`RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). +This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' From 1bd25560146d4da58a405d981cc2cab509926350 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:49:36 -0500 Subject: [PATCH 0847/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index d6653de2d12..427e2e846b7 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -14,7 +14,7 @@ Tiers not specified in this setting will default to `-1` (No Limit). `RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. 
-In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. +In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' Rate Limiting cache is handled by a Redis server. The following environment variables are used to configure access to the server: From c04db0ab42801c035d45091e6f5cc24acc9f48ac Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:49:53 -0500 Subject: [PATCH 0848/1112] Update doc/release-notes/9356-rate-limiting.md Co-authored-by: Oliver Bertuch --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 427e2e846b7..49fbfc2621c 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -15,7 +15,7 @@ Tiers not specified in this setting will default to `-1` (No Limit). `RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
-curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' +`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}'` Rate Limiting cache is handled by a Redis server. The following environment variables are used to configure access to the server: DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. 
\ No newline at end of file From 3dfc2a04ddfee83720355b60e8c5d6347fb424b7 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 24 Jan 2024 14:55:59 -0500 Subject: [PATCH 0849/1112] adding changes per pr comments --- .../iq/dataverse/cache/CacheFactoryBean.java | 8 ++--- .../iq/dataverse/cache/RateLimitSetting.java | 32 +++++++++---------- .../iq/dataverse/cache/RateLimitUtil.java | 22 ++++++------- 3 files changed, 29 insertions(+), 33 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 83ba7a418e4..8e163d21dfe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -6,15 +6,15 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; import jakarta.ejb.EJB; -import jakarta.ejb.Stateless; -import jakarta.inject.Named; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import java.util.logging.Logger; -@Stateless -@Named +@Singleton +@Startup public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); private static JedisPool jedisPool = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java index 14a4439bb56..752f9860127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java @@ -1,48 +1,46 @@ package edu.harvard.iq.dataverse.cache; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import jakarta.json.bind.annotation.JsonbProperty; import java.util.ArrayList; import java.util.List; -@JsonInclude(JsonInclude.Include.NON_NULL) public class RateLimitSetting { - @JsonProperty("tier") + @JsonbProperty("tier") private int tier; - @JsonProperty("limitPerHour") + @JsonbProperty("limitPerHour") private int limitPerHour = RateLimitUtil.NO_LIMIT; - @JsonProperty("actions") - private List rateLimitActions = new ArrayList<>(); + @JsonbProperty("actions") + private List actions = new ArrayList<>(); private int defaultLimitPerHour; public RateLimitSetting() {} - @JsonProperty("tier") + @JsonbProperty("tier") public void setTier(int tier) { this.tier = tier; } - @JsonProperty("tier") + @JsonbProperty("tier") public int getTier() { return this.tier; } - @JsonProperty("limitPerHour") + @JsonbProperty("limitPerHour") public void setLimitPerHour(int limitPerHour) { this.limitPerHour = limitPerHour; } - @JsonProperty("limitPerHour") + @JsonbProperty("limitPerHour") public int getLimitPerHour() { return this.limitPerHour; } - @JsonProperty("actions") - public void setRateLimitActions(List rateLimitActions) { - this.rateLimitActions = rateLimitActions; + @JsonbProperty("actions") + public void setActions(List actions) { + this.actions = actions; } - @JsonProperty("actions") - public List getRateLimitActions() { - return this.rateLimitActions; + @JsonbProperty("actions") + public List getActions() { + return this.actions; } public void setDefaultLimit(int defaultLimitPerHour) { this.defaultLimitPerHour = defaultLimitPerHour; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java 
b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index ee76342dc17..0bde961fa82 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -1,17 +1,16 @@ package edu.harvard.iq.dataverse.cache; -import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.Gson; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonObject; -import jakarta.json.JsonReader; +import jakarta.json.*; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import java.io.StringReader; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.logging.Logger; import static java.lang.Math.max; @@ -19,8 +18,9 @@ public class RateLimitUtil { private static final Logger logger = Logger.getLogger(RateLimitUtil.class.getCanonicalName()); - protected static final List rateLimits = new ArrayList<>(); - protected static final Map rateLimitMap = new HashMap<>(); + protected static final List rateLimits = new CopyOnWriteArrayList<>(); + protected static final Map rateLimitMap = new ConcurrentHashMap<>(); + private static final Gson gson = new Gson(); public static final int NO_LIMIT = -1; public static int getCapacityByTier(SystemConfig systemConfig, int tier) { @@ -81,21 +81,19 @@ private static void init(SystemConfig systemConfig) { rateLimits.forEach(r -> { r.setDefaultLimit(getCapacityByTier(systemConfig, r.getTier())); rateLimitMap.put(getMapKey(r.getTier()), r.getDefaultLimitPerHour()); - r.getRateLimitActions().forEach(a -> rateLimitMap.put(getMapKey(r.getTier(), a), r.getLimitPerHour())); + r.getActions().forEach(a -> rateLimitMap.put(getMapKey(r.getTier(), a), r.getLimitPerHour())); }); } private static void getRateLimitsFromJson(SystemConfig systemConfig) { - ObjectMapper mapper = new ObjectMapper(); String setting = systemConfig.getRateLimitsJson(); if (!setting.isEmpty()) { try { JsonReader jr = Json.createReader(new StringReader(setting)); JsonObject obj= jr.readObject(); JsonArray lst = obj.getJsonArray("rateLimits"); - - rateLimits.addAll(mapper.readValue(lst.toString(), - mapper.getTypeFactory().constructCollectionType(List.class, RateLimitSetting.class))); + rateLimits.addAll(gson.fromJson(String.valueOf(lst), + new ArrayList() {}.getClass().getGenericSuperclass())); } catch (Exception e) { logger.warning("Unable to parse Rate Limit Json" + ": " + e.getLocalizedMessage()); rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization From 727cccf3c121118bc02977c3c7c681ee0df85ab8 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 26 Jan 2024 13:29:03 -0500 Subject: [PATCH 0850/1112] remove redis and replace with jcache hazelcast --- doc/release-notes/9356-rate-limiting.md | 5 +-- .../source/installation/config.rst | 14 ++----- docker-compose-dev.yml | 17 -------- pom.xml | 24 ++++++----- scripts/installer/default.config | 6 --- scripts/installer/install.py | 3 +- scripts/installer/installAppServer.py | 6 --- scripts/installer/interactive.config | 8 ---- .../iq/dataverse/cache/CacheFactoryBean.java | 30 +++++++++----- .../iq/dataverse/cache/RateLimitUtil.java | 38 ++++++------------ .../iq/dataverse/util/SystemConfig.java | 17 -------- .../dataverse/cache/CacheFactoryBeanTest.java | 40 +++++-------------- 12 files changed, 
59 insertions(+), 149 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 49fbfc2621c..d7b9d2defcf 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,4 +1,4 @@ -## Rate Limiting using Redis Cache +## Rate Limiting using JCache (with Hazelcast as a provider) The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. @@ -16,6 +16,3 @@ Tiers not specified in this setting will default to `-1` (No Limit). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. `curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}'` - -Rate Limiting cache is handled by a Redis server. The following environment variables are used to configure access to the server: -DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. \ No newline at end of file diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index ab22451a210..c60953c66f5 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1373,10 +1373,10 @@ Before being moved there, on your machine, large file uploads via API will cause RAM and/or swap usage bursts. You might want to point this to a different location, restrict maximum size of it, and monitor for stale uploads. -.. _redis-cache-rate-limiting: +.. _cache-rate-limiting: -Configure Your Dataverse Installation to use Redis for Rate Limiting --------------------------------------------------------------------- +Configure Your Dataverse Installation to use JCache (with Hazelcast as a provider) for Rate Limiting +---------------------------------------------------------------------------------------------------- Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. 
@@ -1393,14 +1393,6 @@ Note: If either of these settings exist in the database rate limiting will be en In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' -- Redis server configuration is handled through environment variables. The following environment variables are used to configure access to the server: - DATAVERSE_REDIS_HOST; DATAVERSE_REDIS_PORT; DATAVERSE_REDIS_USER; DATAVERSE_REDIS_PASSWORD. - Defaults for docker testing: - DATAVERSE_REDIS_HOST: "redis" - DATAVERSE_REDIS_PORT: "6379" - DATAVERSE_REDIS_USER: "default" - DATAVERSE_REDIS_PASSWORD: "redis_secret" - .. _Branding Your Installation: Branding Your Installation diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index fcb13609c94..b4a7a510839 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -12,10 +12,6 @@ services: DATAVERSE_DB_HOST: postgres DATAVERSE_DB_PASSWORD: secret DATAVERSE_DB_USER: ${DATAVERSE_DB_USER} - DATAVERSE_REDIS_HOST: "redis" - DATAVERSE_REDIS_PORT: "6379" - DATAVERSE_REDIS_USER: "default" - DATAVERSE_REDIS_PASSWORD: "redis_secret" ENABLE_JDWP: "1" ENABLE_RELOAD: "1" SKIP_DEPLOY: "${SKIP_DEPLOY}" @@ -69,7 +65,6 @@ services: - dev_postgres - dev_solr - dev_dv_initializer - - redis_dev volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets @@ -237,18 +232,6 @@ services: MINIO_ROOT_USER: 4cc355_k3y MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k3y command: server /data - - dev_redis: - container_name: "redis_dev" - hostname: "redis" - image: redis/redis-stack:latest - restart: always - ports: - - "6379:6379" - networks: - - dataverse - command: ["redis-server","--bind","redis","--port","6379","--requirepass","redis_secret" ] - networks: dataverse: driver: bridge diff --git a/pom.xml b/pom.xml index 7ae274bc42e..4a2bc13dbc7 100644 --- a/pom.xml +++ b/pom.xml @@ -542,13 +542,21 @@ dataverse-spi 2.0.0 - - redis.clients - jedis - 5.1.0 + javax.cache + cache-api + 1.1.1 + + + com.hazelcast + hazelcast + 5.3.6 + + + xerces + xercesImpl + 2.11.0 - org.junit.jupiter @@ -660,12 +668,6 @@ 3.9.0 test - - ai.grakn - redis-mock - 0.1.3 - test - diff --git a/scripts/installer/default.config b/scripts/installer/default.config index 2a29a1d5270..8647cd02416 100644 --- a/scripts/installer/default.config +++ b/scripts/installer/default.config @@ -32,9 +32,3 @@ DOI_USERNAME = dataciteuser DOI_PASSWORD = datacitepassword DOI_BASEURL = https://mds.test.datacite.org DOI_DATACITERESTAPIURL = https://api.test.datacite.org - -[redis] 
-REDIS_HOST = redis -REDIS_PORT = 6379 -REDIS_USER = default -REDIS_PASSWORD = redis_secret diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 6d6003607bd..99316efb83b 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -100,8 +100,7 @@ "database", "rserve", "system", - "doi", - "redis"] + "doi"] # read pre-defined defaults: diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py index faa5bf42341..03abc03b05e 100644 --- a/scripts/installer/installAppServer.py +++ b/scripts/installer/installAppServer.py @@ -29,12 +29,6 @@ def runAsadminScript(config): os.environ['DOI_USERNAME'] = config.get('doi','DOI_USERNAME') os.environ['DOI_PASSWORD'] = config.get('doi','DOI_PASSWORD') os.environ['DOI_DATACITERESTAPIURL'] = config.get('doi','DOI_DATACITERESTAPIURL') - - os.environ['REDIS_HOST'] = config.get('redis','REDIS_HOST') - os.environ['REDIS_PORT'] = config.get('redis','REDIS_PORT') - os.environ['REDIS_USER'] = config.get('redis','REDIS_USER') - os.environ['REDIS_PASS'] = config.get('redis','REDIS_PASSWORD') - mailServerEntry = config.get('system','MAIL_SERVER') try: diff --git a/scripts/installer/interactive.config b/scripts/installer/interactive.config index 9e0fafaa8b4..ef8110c554f 100644 --- a/scripts/installer/interactive.config +++ b/scripts/installer/interactive.config @@ -24,10 +24,6 @@ DOI_USERNAME = Datacite username DOI_PASSWORD = Datacite password DOI_BASEURL = Datacite URL DOI_DATACITERESTAPIURL = Datacite REST API URL -REDIS_HOST = Redis Server -REDIS_PORT = Redis Server Port -REDIS_USER = Redis User Name -REDIS_PASSWORD = Redis User Password [comments] HOST_DNS_ADDRESS = :(enter numeric IP address, if FQDN is unavailable) GLASSFISH_USER = :This user will be running the App. Server (Payara) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n: @@ -50,7 +46,3 @@ DOI_USERNAME = DataCite or EZID username. Only necessary for publishing / mintin DOI_PASSWORD = DataCite or EZID account password. DOI_BASEURL = DataCite or EZID URL. Probably https://mds.datacite.org DOI_DATACITERESTAPIURL = DataCite REST API URL (Make Data Count, /pids API). 
Probably https://api.datacite.org -REDIS_HOST = -REDIS_PORT = -REDIS_USER = -REDIS_PASSWORD = diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 8e163d21dfe..25bc20ec03d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -1,5 +1,8 @@ package edu.harvard.iq.dataverse.cache; +import com.hazelcast.config.Config; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -8,29 +11,34 @@ import jakarta.ejb.EJB; import jakarta.ejb.Singleton; import jakarta.ejb.Startup; -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.JedisPoolConfig; import java.util.logging.Logger; +import java.util.Map; @Singleton @Startup public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); - private static JedisPool jedisPool = null; + private static HazelcastInstance hazelcastInstance = null; + private static Map rateLimitCache; @EJB SystemConfig systemConfig; + public final static String RATE_LIMIT_CACHE = "rateLimitCache"; + @PostConstruct public void init() { - logger.info("CacheFactoryBean.init Redis Host:Port " + systemConfig.getRedisBaseHost() + ":" + systemConfig.getRedisBasePort()); - jedisPool = new JedisPool(new JedisPoolConfig(), systemConfig.getRedisBaseHost(), Integer.valueOf(systemConfig.getRedisBasePort()), - systemConfig.getRedisUser(), systemConfig.getRedisPassword()); + if (hazelcastInstance == null) { + Config hazelcastConfig = new Config(); + hazelcastConfig.setClusterName("dataverse"); + hazelcastInstance = Hazelcast.newHazelcastInstance(hazelcastConfig); + rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); + } } @Override protected void finalize() throws Throwable { - if (jedisPool != null) { - jedisPool.close(); + if (hazelcastInstance != null) { + hazelcastInstance.shutdown(); } super.finalize(); } @@ -53,8 +61,8 @@ public boolean checkRate(User user, String action) { // get the capacity, i.e. calls per hour, from config int capacity = (user instanceof AuthenticatedUser) ? 
- RateLimitUtil.getCapacityByTier(systemConfig, ((AuthenticatedUser) user).getRateLimitTier()) : - RateLimitUtil.getCapacityByTier(systemConfig, 0); - return (!RateLimitUtil.rateLimited(jedisPool, id.toString(), capacity)); + RateLimitUtil.getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : + RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); + return (!RateLimitUtil.rateLimited(rateLimitCache, id.toString(), capacity)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 0bde961fa82..0688e4536ee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -3,9 +3,10 @@ import com.google.gson.Gson; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.json.*; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; import java.io.StringReader; import java.util.*; @@ -23,42 +24,29 @@ public class RateLimitUtil { private static final Gson gson = new Gson(); public static final int NO_LIMIT = -1; - public static int getCapacityByTier(SystemConfig systemConfig, int tier) { + protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } - public static boolean rateLimited(final JedisPool jedisPool, final String key, int capacityPerHour) { + public static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } - Jedis jedis; - try { - jedis = jedisPool.getResource(); - } catch (Exception e) { - // We can't rate limit if Redis is not reachable - logger.severe("RateLimitUtil.rateLimited jedisPool.getResource() " + e.getMessage()); - return false; - } - long currentTime = System.currentTimeMillis() / 60000L; // convert to minutes double tokensPerMinute = (capacityPerHour / 60.0); - // Get the last time this bucket was added to final String keyLastUpdate = String.format("%s:last_update",key); - long lastUpdate = longFromKey(jedis, keyLastUpdate); + long lastUpdate = longFromKey(cache, keyLastUpdate); long deltaTime = currentTime - lastUpdate; // Get the current number of tokens in the bucket - long tokens = longFromKey(jedis, key); + long tokens = longFromKey(cache, key); long tokensToAdd = (long) (deltaTime * tokensPerMinute); - if (tokensToAdd > 0) { // Don't update timestamp if we aren't adding any tokens to the bucket tokens = min(capacityPerHour, tokens + tokensToAdd); - jedis.set(keyLastUpdate, String.valueOf(currentTime)); + cache.put(keyLastUpdate, String.valueOf(currentTime)); } - // Update with any added tokens and decrement 1 token for this call if not rate limited (0 tokens) - jedis.set(key, String.valueOf(max(0, tokens-1))); - jedisPool.returnResource(jedis); + cache.put(key, String.valueOf(max(0, tokens-1))); return tokens < 1; } @@ -115,8 +103,8 @@ private static String getMapKey(Integer tier, String action) { return key.toString(); } - private static long longFromKey(Jedis r, String key) { - String l = r.get(key); - return l != null ? 
Long.parseLong(l) : 0L; + private static long longFromKey(Map cache, String key) { + Object l = cache.get(key); + return l != null ? Long.parseLong(String.valueOf(l)) : 0L; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 37eec5a1e80..9f4bd7c2e62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1178,21 +1178,4 @@ public int getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settingKey } return value; } - - public String getRedisBaseHost() { - String saneDefault = "redis"; - return System.getProperty("DATAVERSE_REDIS_HOST",saneDefault); - } - public String getRedisBasePort() { - String saneDefault = "6379"; - return System.getProperty("DATAVERSE_REDIS_PORT",saneDefault); - } - public String getRedisUser() { - String saneDefault = "default"; - return System.getProperty("DATAVERSE_REDIS_USER",saneDefault); - } - public String getRedisPassword() { - String saneDefault = "redis_secret"; - return System.getProperty("DATAVERSE_REDIS_PASSWORD",saneDefault); - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index d6be3dcf831..6241674dd7a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -1,12 +1,9 @@ package edu.harvard.iq.dataverse.cache; -import ai.grakn.redismock.RedisServer; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -27,10 +24,9 @@ public class CacheFactoryBeanTest { @Mock SystemConfig systemConfig; @InjectMocks - CacheFactoryBean cache = new CacheFactoryBean(); + static CacheFactoryBean cache = new CacheFactoryBean(); AuthenticatedUser authUser = new AuthenticatedUser(); String action; - static RedisServer mockRedisServer; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -76,29 +72,17 @@ public class CacheFactoryBeanTest { @BeforeEach public void setup() throws IOException { - lenient().doReturn("127.0.0.1").when(systemConfig).getRedisBaseHost(); - lenient().doReturn(String.valueOf(mockRedisServer.getBindPort())).when(systemConfig).getRedisBasePort(); - lenient().doReturn(null).when(systemConfig).getRedisUser(); - lenient().doReturn(null).when(systemConfig).getRedisPassword(); lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(3), anyInt()); lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); cache.init(); authUser.setRateLimitTier(1); // reset to default action = "cmd-" + UUID.randomUUID(); } - @BeforeAll - public static void init() throws IOException { - mockRedisServer = 
RedisServer.newRedisServer(); - mockRedisServer.start(); - } - @AfterAll - public static void cleanup() { - if (mockRedisServer != null) - mockRedisServer.stop(); - } + @Test public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); @@ -152,21 +136,15 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio } } assertTrue(!limited); - } - @Test - public void testAuthenticatedUserWithRateLimitingOff() throws InterruptedException { - lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); - authUser.setSuperuser(false); - authUser.setUserIdentifier("user1"); - boolean rateLimited = false; - int cnt = 0; - for (; cnt <100; cnt++) { - rateLimited = !cache.checkRate(authUser, action); - if (rateLimited) { + // Now change the user's tier so it is no longer limited + authUser.setRateLimitTier(3); // tier 3 = no limit + for (cnt = 0; cnt <200; cnt++) { + limited = !cache.checkRate(authUser, action); + if (limited) { break; } } - assertTrue(!rateLimited && cnt > 99); + assertTrue(!limited && cnt == 200); } } From 58ea032a2bc76d06e496e3e5ca0239146febc00a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 26 Jan 2024 16:47:05 -0500 Subject: [PATCH 0851/1112] adding cache tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 32 +++++++++++++++++++ .../dataverse/cache/CacheFactoryBeanTest.java | 10 ++++++ 2 files changed, 42 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 25bc20ec03d..43c79b8c7b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -65,4 +65,36 @@ public boolean checkRate(User user, String action) { RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); return (!RateLimitUtil.rateLimited(rateLimitCache, id.toString(), capacity)); } + + public long getCacheSize(String cacheName) { + long cacheSize = 0; + switch (cacheName) { + case RATE_LIMIT_CACHE: + cacheSize = rateLimitCache.size(); + break; + default: + break; + } + return cacheSize; + } + public Object getCacheValue(String cacheName, String key) { + Object cacheValue = null; + switch (cacheName) { + case RATE_LIMIT_CACHE: + cacheValue = rateLimitCache.containsKey(key) ? 
rateLimitCache.get(key) : ""; + break; + default: + break; + } + return cacheValue; + } + public void setCacheValue(String cacheName, String key, Object value) { + switch (cacheName) { + case RATE_LIMIT_CACHE: + rateLimitCache.put(key, (String) value); + break; + default: + break; + } + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 6241674dd7a..f65da27deb6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -81,6 +81,16 @@ public void setup() throws IOException { cache.init(); authUser.setRateLimitTier(1); // reset to default action = "cmd-" + UUID.randomUUID(); + + // testing cache implementation and code coverage + final String cacheKey = "CacheTestKey" + UUID.randomUUID(); + final String cacheValue = "CacheTestValue" + UUID.randomUUID(); + long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); + System.out.println("Cache Size : " + cacheSize); + cache.setCacheValue(cache.RATE_LIMIT_CACHE, cacheKey,cacheValue); + assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > cacheSize); + Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); + assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); } @Test From f7f96646f23f7ba4fdba1f18372efa523bc716d1 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 09:50:46 -0500 Subject: [PATCH 0852/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 9 +++---- .../iq/dataverse/cache/RateLimitUtil.java | 13 ++++++++-- .../dataverse/cache/CacheFactoryBeanTest.java | 24 +++++++++---------- 3 files changed, 26 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 43c79b8c7b8..d39c4686bfe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -53,17 +53,14 @@ public boolean checkRate(User user, String action) { if (user != null && user.isSuperuser()) { return true; }; - StringBuffer id = new StringBuffer(); - id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); - if (action != null) { - id.append(":").append(action); - } + + String cacheKey = RateLimitUtil.generateCacheKey(user, action); // get the capacity, i.e. calls per hour, from config int capacity = (user instanceof AuthenticatedUser) ? 
RateLimitUtil.getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); - return (!RateLimitUtil.rateLimited(rateLimitCache, id.toString(), capacity)); + return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); } public long getCacheSize(String cacheName) { diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 0688e4536ee..c60f2bb8e0e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -1,6 +1,8 @@ package edu.harvard.iq.dataverse.cache; import com.google.gson.Gson; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.json.Json; @@ -28,6 +30,14 @@ protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } + public static String generateCacheKey(final User user, final String action) { + StringBuffer id = new StringBuffer(); + id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); + if (action != null) { + id.append(":").append(action); + } + return id.toString(); + } public static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; @@ -83,9 +93,8 @@ private static void getRateLimitsFromJson(SystemConfig systemConfig) { rateLimits.addAll(gson.fromJson(String.valueOf(lst), new ArrayList() {}.getClass().getGenericSuperclass())); } catch (Exception e) { - logger.warning("Unable to parse Rate Limit Json" + ": " + e.getLocalizedMessage()); + logger.warning("Unable to parse Rate Limit Json: " + e.getLocalizedMessage() + " Json:(" + setting + ")"); rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization - e.printStackTrace(); } } } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index f65da27deb6..df57948980d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -104,7 +104,7 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { break; } } - assertTrue(rateLimited && cnt > 1 && cnt <= 30); + assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } @Test @@ -120,7 +120,7 @@ public void testAdminUserExemptFromGettingRateLimited() throws InterruptedExcept break; } } - assertTrue(!rateLimited && cnt >= 99); + assertTrue(!rateLimited && cnt >= 99, "rateLimited:"+rateLimited + " cnt:"+cnt); } @Test @@ -128,33 +128,33 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio authUser.setSuperuser(false); authUser.setUserIdentifier("authUser"); authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds - boolean limited = false; + boolean rateLimited = false; int cnt; for (cnt = 0; cnt <200; cnt++) { - limited = !cache.checkRate(authUser, action); - if (limited) { + rateLimited = 
!cache.checkRate(authUser, action); + if (rateLimited) { break; } } - assertTrue(limited && cnt == 120); + assertTrue(rateLimited && cnt == 120, "rateLimited:"+rateLimited + " cnt:"+cnt); for (cnt = 0; cnt <60; cnt++) { Thread.sleep(1000);// wait for bucket to be replenished (check each second for 1 minute max) - limited = !cache.checkRate(authUser, action); - if (!limited) { + rateLimited = !cache.checkRate(authUser, action); + if (!rateLimited) { break; } } - assertTrue(!limited); + assertTrue(!rateLimited, "rateLimited:"+rateLimited + " cnt:"+cnt); // Now change the user's tier so it is no longer limited authUser.setRateLimitTier(3); // tier 3 = no limit for (cnt = 0; cnt <200; cnt++) { - limited = !cache.checkRate(authUser, action); - if (limited) { + rateLimited = !cache.checkRate(authUser, action); + if (rateLimited) { break; } } - assertTrue(!limited && cnt == 200); + assertTrue(!rateLimited && cnt == 200, "rateLimited:"+rateLimited + " cnt:"+cnt); } } From dbb774b0b146d5c7ea4f3aea8ad5108e49b63ab0 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 10:39:12 -0500 Subject: [PATCH 0853/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 18 ++++++---------- .../iq/dataverse/cache/RateLimitUtil.java | 17 ++++++++++++--- .../dataverse/cache/CacheFactoryBeanTest.java | 10 ++++----- .../iq/dataverse/cache/RateLimitUtilTest.java | 21 +++++++++++++++++++ 4 files changed, 46 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index d39c4686bfe..a1caa0379e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -3,8 +3,6 @@ import com.hazelcast.config.Config; import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstance; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; @@ -50,17 +48,13 @@ protected void finalize() throws Throwable { * @return true if user is superuser or rate not limited */ public boolean checkRate(User user, String action) { - if (user != null && user.isSuperuser()) { + int capacity = RateLimitUtil.getCapacity(systemConfig, user, action); + if (capacity == RateLimitUtil.NO_LIMIT) { return true; - }; - - String cacheKey = RateLimitUtil.generateCacheKey(user, action); - - // get the capacity, i.e. calls per hour, from config - int capacity = (user instanceof AuthenticatedUser) ? 
- RateLimitUtil.getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : - RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); - return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); + } else { + String cacheKey = RateLimitUtil.generateCacheKey(user, action); + return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); + } } public long getCacheSize(String cacheName) { diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index c60f2bb8e0e..a5bff19599c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -1,10 +1,12 @@ package edu.harvard.iq.dataverse.cache; import com.google.gson.Gson; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.ejb.EJB; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -30,7 +32,7 @@ protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } - public static String generateCacheKey(final User user, final String action) { + protected static String generateCacheKey(final User user, final String action) { StringBuffer id = new StringBuffer(); id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); if (action != null) { @@ -38,7 +40,16 @@ public static String generateCacheKey(final User user, final String action) { } return id.toString(); } - public static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { + protected static int getCapacity(SystemConfig systemConfig, User user, String action) { + if (user != null && user.isSuperuser()) { + return NO_LIMIT; + }; + // get the capacity, i.e. calls per hour, from config + return (user instanceof AuthenticatedUser) ? 
+ RateLimitUtil.getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : + RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); + } + protected static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -60,7 +71,7 @@ public static boolean rateLimited(final Map cache, final String return tokens < 1; } - public static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, String action) { + protected static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, String action) { if (rateLimits.isEmpty()) { init(systemConfig); } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index df57948980d..e7b98d84908 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -26,7 +26,6 @@ public class CacheFactoryBeanTest { @InjectMocks static CacheFactoryBean cache = new CacheFactoryBean(); AuthenticatedUser authUser = new AuthenticatedUser(); - String action; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -80,13 +79,11 @@ public void setup() throws IOException { cache.init(); authUser.setRateLimitTier(1); // reset to default - action = "cmd-" + UUID.randomUUID(); // testing cache implementation and code coverage final String cacheKey = "CacheTestKey" + UUID.randomUUID(); final String cacheValue = "CacheTestValue" + UUID.randomUUID(); long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); - System.out.println("Cache Size : " + cacheSize); cache.setCacheValue(cache.RATE_LIMIT_CACHE, cacheKey,cacheValue); assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > cacheSize); Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); @@ -96,6 +93,7 @@ public void setup() throws IOException { @Test public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); + String action = "cmd-" + UUID.randomUUID(); boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -104,6 +102,7 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { break; } } + assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } @@ -111,7 +110,7 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { public void testAdminUserExemptFromGettingRateLimited() throws InterruptedException { authUser.setSuperuser(true); authUser.setUserIdentifier("admin"); - + String action = "cmd-" + UUID.randomUUID(); boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -128,6 +127,7 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio authUser.setSuperuser(false); authUser.setUserIdentifier("authUser"); authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds + String action = "cmd-" + UUID.randomUUID(); boolean rateLimited = false; int cnt; for (cnt = 0; cnt <200; cnt++) { @@ -147,7 +147,7 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio } assertTrue(!rateLimited, "rateLimited:"+rateLimited + " cnt:"+cnt); - // Now change the user's tier so it is no longer limited + // Now change the user's tier, so it is no longer limited 
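For reference, the 30-second replenishment this test waits on falls out of the token-bucket arithmetic in RateLimitUtil.rateLimited() shown earlier: a capacity of 120 calls per hour is 2.0 tokens per minute, and time is tracked in whole minutes, so a drained bucket gains two tokens once a full minute has elapsed, on average one token every 30 seconds. A rough worked sketch with illustrative values only; the variable names mirror the method but nothing below is part of the patch itself:

    int capacityPerHour = 120;                                // tier 2 in this test setup
    double tokensPerMinute = capacityPerHour / 60.0;          // 2.0 tokens per minute
    long deltaTime = 1;                                       // one whole minute since the last refill
    long tokensToAdd = (long) (deltaTime * tokensPerMinute);  // 2 tokens put back
    long tokens = Math.min(capacityPerHour, 0 + tokensToAdd); // bucket was empty, now holds 2
    boolean blocked = tokens < 1;                             // false, so the next checkRate() passes

With that refill behaviour in mind, the hunk continues below by switching the user to tier 3, which the mocked tier settings resolve to RateLimitUtil.NO_LIMIT, so no further throttling occurs.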
authUser.setRateLimitTier(3); // tier 3 = no limit for (cnt = 0; cnt <200; cnt++) { rateLimited = !cache.checkRate(authUser, action); diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java index d51fe7471e3..b2b7434cc3c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java @@ -1,5 +1,8 @@ package edu.harvard.iq.dataverse.cache; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -92,4 +95,22 @@ public void testBadJson() { assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 2, "GetLatestAccessibleDatasetVersionCommand")); } + + @Test + public void testGenerateCacheKey() { + User user = GuestUser.get(); + assertEquals(RateLimitUtil.generateCacheKey(user,"action1"), ":guest:action1"); + } + @Test + public void testGetCapacity() { + lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); + GuestUser guestUser = GuestUser.get(); + assertEquals(10, RateLimitUtil.getCapacity(systemConfig, guestUser, "GetPrivateUrlCommand")); + + AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setRateLimitTier(1); + assertEquals(30, RateLimitUtil.getCapacity(systemConfig, authUser, "GetPrivateUrlCommand")); + authUser.setSuperuser(true); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(systemConfig, authUser, "GetPrivateUrlCommand")); + } } From b489ec87d970a49dbb72a30f974609c81806824b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 11:03:53 -0500 Subject: [PATCH 0854/1112] fixing unit tests --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index e7b98d84908..488c4afdd19 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -89,7 +89,7 @@ public void setup() throws IOException { Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); } - +/* @Test public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); @@ -105,7 +105,7 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } - +*/ @Test public void testAdminUserExemptFromGettingRateLimited() throws InterruptedException { authUser.setSuperuser(true); From 700e7991226c25bf608c737825e393977a073df9 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 11:37:36 -0500 Subject: [PATCH 0855/1112] fixing unit tests --- .../harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 9 +++++++-- 1 file changed, 7 
insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 488c4afdd19..88704840923 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -89,7 +89,7 @@ public void setup() throws IOException { Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); } -/* + @Test public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); @@ -102,10 +102,15 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { break; } } + String key = RateLimitUtil.generateCacheKey(user,action); + String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); + String keyLastUpdate = String.format("%s:last_update",key); + String lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); + System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } -*/ + @Test public void testAdminUserExemptFromGettingRateLimited() throws InterruptedException { authUser.setSuperuser(true); From 5a7d3002dcecc60dddf44de730b7a1a3d8cd14a5 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 12:05:21 -0500 Subject: [PATCH 0856/1112] fixing unit tests --- .../dataverse/cache/CacheFactoryBeanTest.java | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 88704840923..1918c7b6743 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -94,6 +94,13 @@ public void setup() throws IOException { public void testGuestUserGettingRateLimited() throws InterruptedException { User user = GuestUser.get(); String action = "cmd-" + UUID.randomUUID(); + + String key = RateLimitUtil.generateCacheKey(user,action); + String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); + String keyLastUpdate = String.format("%s:last_update",key); + String lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); + System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); + boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -101,11 +108,15 @@ public void testGuestUserGettingRateLimited() throws InterruptedException { if (rateLimited) { break; } + if (cnt == 10) { + value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); + lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); + System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); + } } - String key = RateLimitUtil.generateCacheKey(user,action); - String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); - String keyLastUpdate = String.format("%s:last_update",key); - String lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); + + value = 
String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); + lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); From e2b5fe85991e035824748ba16fba547781e89999 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 14:37:36 -0500 Subject: [PATCH 0857/1112] fixing unit tests --- .../iq/dataverse/cache/RateLimitUtil.java | 7 ++-- .../dataverse/cache/CacheFactoryBeanTest.java | 36 ++++++++++--------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index a5bff19599c..73de0fe5528 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.ejb.EJB; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -96,7 +95,7 @@ private static void init(SystemConfig systemConfig) { private static void getRateLimitsFromJson(SystemConfig systemConfig) { String setting = systemConfig.getRateLimitsJson(); - if (!setting.isEmpty()) { + if (!setting.isEmpty() && rateLimits.isEmpty()) { try { JsonReader jr = Json.createReader(new StringReader(setting)); JsonObject obj= jr.readObject(); @@ -110,11 +109,11 @@ private static void getRateLimitsFromJson(SystemConfig systemConfig) { } } - private static String getMapKey(Integer tier) { + private static String getMapKey(int tier) { return getMapKey(tier, null); } - private static String getMapKey(Integer tier, String action) { + private static String getMapKey(int tier, String action) { StringBuffer key = new StringBuffer(); key.append(tier).append(":"); if (action != null) { diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 1918c7b6743..e3d334d4623 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; -import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -10,15 +9,18 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import java.io.IOException; import java.util.UUID; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.doReturn; @ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class CacheFactoryBeanTest { @Mock @@ -26,6 +28,7 @@ public class CacheFactoryBeanTest { @InjectMocks static 
CacheFactoryBean cache = new CacheFactoryBean(); AuthenticatedUser authUser = new AuthenticatedUser(); + GuestUser guestUser = GuestUser.get(); static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -71,13 +74,13 @@ public class CacheFactoryBeanTest { @BeforeEach public void setup() throws IOException { - lenient().doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); - lenient().doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); - lenient().doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); - lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(3), anyInt()); - lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); + doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); + doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); + doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(3), anyInt()); + doReturn(settingJson).when(systemConfig).getRateLimitsJson(); - cache.init(); + cache.init(); // PostConstruct authUser.setRateLimitTier(1); // reset to default // testing cache implementation and code coverage @@ -91,39 +94,38 @@ public void setup() throws IOException { } @Test - public void testGuestUserGettingRateLimited() throws InterruptedException { - User user = GuestUser.get(); + public void testGuestUserGettingRateLimited() { String action = "cmd-" + UUID.randomUUID(); - String key = RateLimitUtil.generateCacheKey(user,action); + String key = RateLimitUtil.generateCacheKey(guestUser,action); String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); String keyLastUpdate = String.format("%s:last_update",key); String lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); + System.out.println(">>> key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { - rateLimited = !cache.checkRate(user, action); + rateLimited = !cache.checkRate(guestUser, action); if (rateLimited) { break; } - if (cnt == 10) { + if (cnt % 10 == 0) { value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); + System.out.println(cnt + " key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); } } value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(">>> key/value/lastUpdate /" + key + "/" + value + "/" + lastUpdate); + System.out.println(cnt + " key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } @Test - public void testAdminUserExemptFromGettingRateLimited() throws InterruptedException { + public void testAdminUserExemptFromGettingRateLimited() { authUser.setSuperuser(true); authUser.setUserIdentifier("admin"); String action = "cmd-" + 
UUID.randomUUID(); From 7fb8c8867ab36d6544639c279ba6c99a45b7703b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 15:05:45 -0500 Subject: [PATCH 0858/1112] fixing unit tests --- .../edu/harvard/iq/dataverse/cache/RateLimitUtil.java | 5 +++-- .../harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 9 +++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 73de0fe5528..48b1b1be072 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -28,6 +28,7 @@ public class RateLimitUtil { public static final int NO_LIMIT = -1; protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { + System.out.println("getIntFromCSVStringOrDefault: " +tier + " " + systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT)); return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } @@ -45,8 +46,8 @@ protected static int getCapacity(SystemConfig systemConfig, User user, String ac }; // get the capacity, i.e. calls per hour, from config return (user instanceof AuthenticatedUser) ? - RateLimitUtil.getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : - RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, action); + getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : + getCapacityByTierAndAction(systemConfig, 0, action); } protected static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index e3d334d4623..15408605473 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -74,10 +75,10 @@ public class CacheFactoryBeanTest { @BeforeEach public void setup() throws IOException { - doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); - doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); - doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); - doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(3), anyInt()); + doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); + doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); + doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); + 
doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(3), eq(RateLimitUtil.NO_LIMIT)); doReturn(settingJson).when(systemConfig).getRateLimitsJson(); cache.init(); // PostConstruct From 0674105914b7f4d7faff1855c2583fddce0eb629 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 16:00:08 -0500 Subject: [PATCH 0859/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 1 + .../dataverse/cache/CacheFactoryBeanTest.java | 62 ++++++++++--------- 2 files changed, 34 insertions(+), 29 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index a1caa0379e0..f7b93b52c3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -29,6 +29,7 @@ public void init() { if (hazelcastInstance == null) { Config hazelcastConfig = new Config(); hazelcastConfig.setClusterName("dataverse"); + hazelcastConfig.getJetConfig().setEnabled(true); hazelcastInstance = Hazelcast.newHazelcastInstance(hazelcastConfig); rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 15408605473..e968a6f9fad 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -7,8 +7,6 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ -19,17 +17,17 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class CacheFactoryBeanTest { - @Mock - SystemConfig systemConfig; - @InjectMocks - static CacheFactoryBean cache = new CacheFactoryBean(); + private SystemConfig mockedSystemConfig; + static CacheFactoryBean cache = null; AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); + String action; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -75,29 +73,39 @@ public class CacheFactoryBeanTest { @BeforeEach public void setup() throws IOException { - doReturn(30).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); - doReturn(60).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); - doReturn(120).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); - doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(3), eq(RateLimitUtil.NO_LIMIT)); - doReturn(settingJson).when(systemConfig).getRateLimitsJson(); - - cache.init(); // 
PostConstruct - authUser.setRateLimitTier(1); // reset to default - - // testing cache implementation and code coverage - final String cacheKey = "CacheTestKey" + UUID.randomUUID(); - final String cacheValue = "CacheTestValue" + UUID.randomUUID(); - long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); - cache.setCacheValue(cache.RATE_LIMIT_CACHE, cacheKey,cacheValue); - assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > cacheSize); - Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); - assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); + // reuse cache and config for all tests + if (cache == null) { + mockedSystemConfig = mock(SystemConfig.class); + doReturn(30).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); + doReturn(60).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); + doReturn(120).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); + doReturn(RateLimitUtil.NO_LIMIT).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(3), eq(RateLimitUtil.NO_LIMIT)); + doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); + cache = new CacheFactoryBean(); + cache.systemConfig = mockedSystemConfig; + cache.init(); // PostConstruct - start Hazelcast + + // testing cache implementation and code coverage + final String cacheKey = "CacheTestKey" + UUID.randomUUID(); + final String cacheValue = "CacheTestValue" + UUID.randomUUID(); + long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); + cache.setCacheValue(cache.RATE_LIMIT_CACHE, cacheKey,cacheValue); + assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > cacheSize); + Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); + assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); + } + + // reset to default auth user + authUser.setRateLimitTier(1); + authUser.setSuperuser(false); + authUser.setUserIdentifier("authUser"); + + // create a unique action for each test + action = "cmd-" + UUID.randomUUID(); } @Test public void testGuestUserGettingRateLimited() { - String action = "cmd-" + UUID.randomUUID(); - String key = RateLimitUtil.generateCacheKey(guestUser,action); String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); String keyLastUpdate = String.format("%s:last_update",key); @@ -129,7 +137,6 @@ public void testGuestUserGettingRateLimited() { public void testAdminUserExemptFromGettingRateLimited() { authUser.setSuperuser(true); authUser.setUserIdentifier("admin"); - String action = "cmd-" + UUID.randomUUID(); boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -143,10 +150,7 @@ public void testAdminUserExemptFromGettingRateLimited() { @Test public void testAuthenticatedUserGettingRateLimited() throws InterruptedException { - authUser.setSuperuser(false); - authUser.setUserIdentifier("authUser"); authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds - String action = "cmd-" + UUID.randomUUID(); boolean rateLimited = false; int cnt; for (cnt = 0; cnt <200; cnt++) { From a55ed93dd2136dd20921d5bafa78957974a253d8 Mon Sep 17 00:00:00 2001 From: Steven Winship 
Date: Mon, 29 Jan 2024 16:27:22 -0500 Subject: [PATCH 0860/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 4 +- .../iq/dataverse/cache/RateLimitUtil.java | 11 ++-- .../dataverse/cache/CacheFactoryBeanTest.java | 4 ++ .../iq/dataverse/cache/RateLimitUtilTest.java | 55 ++++++++++--------- 4 files changed, 40 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index f7b93b52c3e..71e009c7ef2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -18,7 +18,7 @@ public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); private static HazelcastInstance hazelcastInstance = null; - private static Map rateLimitCache; + protected static Map rateLimitCache; @EJB SystemConfig systemConfig; @@ -54,7 +54,7 @@ public boolean checkRate(User user, String action) { return true; } else { String cacheKey = RateLimitUtil.generateCacheKey(user, action); - return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); + return (!RateLimitUtil.rateLimited(cacheKey, capacity)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 48b1b1be072..a1ccab65505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -28,7 +28,6 @@ public class RateLimitUtil { public static final int NO_LIMIT = -1; protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { - System.out.println("getIntFromCSVStringOrDefault: " +tier + " " + systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT)); return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); } @@ -49,7 +48,7 @@ protected static int getCapacity(SystemConfig systemConfig, User user, String ac getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : getCapacityByTierAndAction(systemConfig, 0, action); } - protected static boolean rateLimited(final Map cache, final String key, int capacityPerHour) { + protected static boolean rateLimited(final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -57,17 +56,17 @@ protected static boolean rateLimited(final Map cache, final Stri double tokensPerMinute = (capacityPerHour / 60.0); // Get the last time this bucket was added to final String keyLastUpdate = String.format("%s:last_update",key); - long lastUpdate = longFromKey(cache, keyLastUpdate); + long lastUpdate = longFromKey(CacheFactoryBean.rateLimitCache, keyLastUpdate); long deltaTime = currentTime - lastUpdate; // Get the current number of tokens in the bucket - long tokens = longFromKey(cache, key); + long tokens = longFromKey(CacheFactoryBean.rateLimitCache, key); long tokensToAdd = (long) (deltaTime * tokensPerMinute); if (tokensToAdd > 0) { // Don't update timestamp if we aren't adding any tokens to the bucket tokens = min(capacityPerHour, tokens + tokensToAdd); - cache.put(keyLastUpdate, String.valueOf(currentTime)); + CacheFactoryBean.rateLimitCache.put(keyLastUpdate, String.valueOf(currentTime)); } // 
Update with any added tokens and decrement 1 token for this call if not rate limited (0 tokens) - cache.put(key, String.valueOf(max(0, tokens-1))); + CacheFactoryBean.rateLimitCache.put(key, String.valueOf(max(0, tokens-1))); return tokens < 1; } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index e968a6f9fad..5eb0305a60a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -85,6 +85,10 @@ public void setup() throws IOException { cache.systemConfig = mockedSystemConfig; cache.init(); // PostConstruct - start Hazelcast + // clear the static data so it can be reloaded with the new mocked data + RateLimitUtil.rateLimitMap.clear(); + RateLimitUtil.rateLimits.clear(); + // testing cache implementation and code coverage final String cacheKey = "CacheTestKey" + UUID.randomUUID(); final String cacheValue = "CacheTestValue" + UUID.randomUUID(); diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java index b2b7434cc3c..a7825481ade 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java @@ -3,23 +3,24 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.*; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.*; @ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class RateLimitUtilTest { - @Mock - SystemConfig systemConfig; + private SystemConfig mockedSystemConfig; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + @@ -67,33 +68,35 @@ public class RateLimitUtilTest { @BeforeEach public void setup() { - lenient().doReturn(100).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(0), anyInt()); - lenient().doReturn(200).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(1), anyInt()); - lenient().doReturn(RateLimitUtil.NO_LIMIT).when(systemConfig).getIntFromCSVStringOrDefault(any(),eq(2), anyInt()); + mockedSystemConfig = mock(SystemConfig.class); + doReturn(100).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); + doReturn(200).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); + doReturn(RateLimitUtil.NO_LIMIT).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); + // clear the static data so it can be reloaded 
with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); } @Test public void testConfig() { - lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); - assertEquals(100, RateLimitUtil.getCapacityByTier(systemConfig, 0)); - assertEquals(200, RateLimitUtil.getCapacityByTier(systemConfig, 1)); - assertEquals(1, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "DestroyDatasetCommand")); - assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "Default Limit")); + doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); + assertEquals(100, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 0)); + assertEquals(200, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 1)); + assertEquals(1, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "DestroyDatasetCommand")); + assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "Default Limit")); - assertEquals(30, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "Default Limit")); + assertEquals(30, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "Default Limit")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 2, "Default No Limit")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 2, "Default No Limit")); } @Test public void testBadJson() { - lenient().doReturn(settingJsonBad).when(systemConfig).getRateLimitsJson(); - assertEquals(100, RateLimitUtil.getCapacityByTier(systemConfig, 0)); - assertEquals(200, RateLimitUtil.getCapacityByTier(systemConfig, 1)); - assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 0, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(systemConfig, 2, "GetLatestAccessibleDatasetVersionCommand")); + doReturn(settingJsonBad).when(mockedSystemConfig).getRateLimitsJson(); + assertEquals(100, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 0)); + assertEquals(200, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 1)); + assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 2, "GetLatestAccessibleDatasetVersionCommand")); } @Test @@ -103,14 +106,14 @@ public void testGenerateCacheKey() { } @Test public void testGetCapacity() { - lenient().doReturn(settingJson).when(systemConfig).getRateLimitsJson(); + doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); GuestUser guestUser = GuestUser.get(); - assertEquals(10, RateLimitUtil.getCapacity(systemConfig, guestUser, "GetPrivateUrlCommand")); + assertEquals(10, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "GetPrivateUrlCommand")); AuthenticatedUser authUser = new AuthenticatedUser(); authUser.setRateLimitTier(1); - assertEquals(30, 
RateLimitUtil.getCapacity(systemConfig, authUser, "GetPrivateUrlCommand")); + assertEquals(30, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); authUser.setSuperuser(true); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(systemConfig, authUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); } } From c7b5969e6545777391fadf251b1ec709cd79eaf5 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 29 Jan 2024 16:42:21 -0500 Subject: [PATCH 0861/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBeanTest.java | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 5eb0305a60a..63c3f9e8bb8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -110,12 +110,6 @@ public void setup() throws IOException { @Test public void testGuestUserGettingRateLimited() { - String key = RateLimitUtil.generateCacheKey(guestUser,action); - String value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); - String keyLastUpdate = String.format("%s:last_update",key); - String lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(">>> key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); - boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -123,16 +117,7 @@ public void testGuestUserGettingRateLimited() { if (rateLimited) { break; } - if (cnt % 10 == 0) { - value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); - lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(cnt + " key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); - } } - - value = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, key)); - lastUpdate = String.valueOf(cache.getCacheValue(cache.RATE_LIMIT_CACHE, keyLastUpdate)); - System.out.println(cnt + " key|value|lastUpdate |" + key + "|" + value + "|" + lastUpdate); assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } From a27c7851e76282da3b88c1acf51989c5e5216be4 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 30 Jan 2024 11:54:14 -0500 Subject: [PATCH 0862/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 25 ++++++++--- .../iq/dataverse/cache/RateLimitUtil.java | 10 ++--- .../dataverse/cache/CacheFactoryBeanTest.java | 41 +++++++++++++++++++ 3 files changed, 66 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 71e009c7ef2..213ba429bdf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; import jakarta.ejb.EJB; import jakarta.ejb.Singleton; import jakarta.ejb.Startup; @@ -17,8 +18,8 @@ @Startup public class 
CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); - private static HazelcastInstance hazelcastInstance = null; - protected static Map rateLimitCache; + private HazelcastInstance hazelcastInstance = null; + private Map rateLimitCache; @EJB SystemConfig systemConfig; @@ -34,11 +35,16 @@ public void init() { rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } } - @Override - protected void finalize() throws Throwable { + @PreDestroy + protected void cleanup() { if (hazelcastInstance != null) { hazelcastInstance.shutdown(); + hazelcastInstance = null; } + } + @Override + protected void finalize() throws Throwable { + cleanup(); super.finalize(); } @@ -54,7 +60,7 @@ public boolean checkRate(User user, String action) { return true; } else { String cacheKey = RateLimitUtil.generateCacheKey(user, action); - return (!RateLimitUtil.rateLimited(cacheKey, capacity)); + return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); } } @@ -89,4 +95,13 @@ public void setCacheValue(String cacheName, String key, Object value) { break; } } + public void clearCache(String cacheName) { + switch (cacheName) { + case RATE_LIMIT_CACHE: + rateLimitCache.clear(); + break; + default: + break; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index a1ccab65505..1e676adfe03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -48,7 +48,7 @@ protected static int getCapacity(SystemConfig systemConfig, User user, String ac getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : getCapacityByTierAndAction(systemConfig, 0, action); } - protected static boolean rateLimited(final String key, int capacityPerHour) { + protected static boolean rateLimited(final Map rateLimitCache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -56,17 +56,17 @@ protected static boolean rateLimited(final String key, int capacityPerHour) { double tokensPerMinute = (capacityPerHour / 60.0); // Get the last time this bucket was added to final String keyLastUpdate = String.format("%s:last_update",key); - long lastUpdate = longFromKey(CacheFactoryBean.rateLimitCache, keyLastUpdate); + long lastUpdate = longFromKey(rateLimitCache, keyLastUpdate); long deltaTime = currentTime - lastUpdate; // Get the current number of tokens in the bucket - long tokens = longFromKey(CacheFactoryBean.rateLimitCache, key); + long tokens = longFromKey(rateLimitCache, key); long tokensToAdd = (long) (deltaTime * tokensPerMinute); if (tokensToAdd > 0) { // Don't update timestamp if we aren't adding any tokens to the bucket tokens = min(capacityPerHour, tokens + tokensToAdd); - CacheFactoryBean.rateLimitCache.put(keyLastUpdate, String.valueOf(currentTime)); + rateLimitCache.put(keyLastUpdate, String.valueOf(currentTime)); } // Update with any added tokens and decrement 1 token for this call if not rate limited (0 tokens) - CacheFactoryBean.rateLimitCache.put(key, String.valueOf(max(0, tokens-1))); + rateLimitCache.put(key, String.valueOf(max(0, tokens-1))); return tokens < 1; } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 63c3f9e8bb8..a4d955dc64c 100644 
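For reference, a minimal standalone sketch of the token-bucket check that the hunk above refactors to take the cache as a parameter. A plain ConcurrentHashMap stands in for the Hazelcast-backed map, the minute-based clock is an assumption (the patch computes currentTime outside the shown hunk), and the class name is illustrative:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class TokenBucketSketch {
    static boolean rateLimited(Map<String, String> cache, String key, int capacityPerHour) {
        if (capacityPerHour < 0) {
            return false; // NO_LIMIT
        }
        long nowMinutes = System.currentTimeMillis() / 60_000L; // assumption: bucket refills per minute
        double tokensPerMinute = capacityPerHour / 60.0;
        String keyLastUpdate = key + ":last_update";
        long lastUpdate = cache.containsKey(keyLastUpdate) ? Long.parseLong(cache.get(keyLastUpdate)) : 0L;
        long tokens = cache.containsKey(key) ? Long.parseLong(cache.get(key)) : 0L;
        long tokensToAdd = (long) ((nowMinutes - lastUpdate) * tokensPerMinute);
        if (tokensToAdd > 0) { // only touch the timestamp when tokens were actually added
            tokens = Math.min(capacityPerHour, tokens + tokensToAdd);
            cache.put(keyLastUpdate, String.valueOf(nowMinutes));
        }
        cache.put(key, String.valueOf(Math.max(0, tokens - 1))); // spend one token for this call
        return tokens < 1;
    }

    public static void main(String[] args) {
        Map<String, String> cache = new ConcurrentHashMap<>();
        for (int i = 0; i < 4; i++) {
            // with a capacity of 2 per hour, the third and fourth calls are rejected
            System.out.println("call " + i + " rate limited? " + rateLimited(cache, ":guest:demoAction", 2));
        }
    }
}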
--- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -108,6 +109,13 @@ public void setup() throws IOException { action = "cmd-" + UUID.randomUUID(); } + @AfterAll + public static void cleanup() { + if (cache != null) { + cache.cleanup(); // PreDestroy - shutdown Hazelcast + cache = null; + } + } @Test public void testGuestUserGettingRateLimited() { boolean rateLimited = false; @@ -169,4 +177,37 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio } assertTrue(!rateLimited && cnt == 200, "rateLimited:"+rateLimited + " cnt:"+cnt); } + + @Test + public void testCluster() { + //make sure at least 1 entry is in the original cache + cache.checkRate(authUser, action); + + // create a second cache to test cluster + CacheFactoryBean cache2 = new CacheFactoryBean(); + cache2.systemConfig = mockedSystemConfig; + cache2.init(); // PostConstruct - start Hazelcast + + // check to see if the new cache synced with the existing cache + long s1 = cache.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); + long s2 = cache2.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); + assertTrue(s1 > 0 && s1 == s2); + + String key = "key1"; + String value = "value1"; + // verify that both caches stay in sync + cache.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); + assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + // clearing one cache also clears the other cache in the cluster + cache2.clearCache(CacheFactoryBean.RATE_LIMIT_CACHE); + assertTrue(String.valueOf(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key)).isEmpty()); + + // verify no issue dropping one node from cluster + cache2.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); + assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + cache2.cleanup(); // remove cache2 + assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + + } } From ecca881731c4796c7379db45e90fb868767c1058 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 30 Jan 2024 12:07:03 -0500 Subject: [PATCH 0863/1112] fixing unit tests --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index a4d955dc64c..6815d8b872b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -191,7 +191,7 @@ public void testCluster() { // check to see if the new cache synced with the existing cache long s1 = cache.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); long s2 = cache2.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); - assertTrue(s1 > 0 && s1 == s2); + assertTrue(s1 > 0 && s1 == s2, "Size1:" + s1 + " Size2:" + s2 ); String key = "key1"; String value = "value1"; From 
11a37e39b10e4bf1a4ce1a09427c2d77ff9136db Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 30 Jan 2024 13:06:38 -0500 Subject: [PATCH 0864/1112] fixing unit tests --- .../iq/dataverse/cache/CacheFactoryBean.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 213ba429bdf..94fe6cd2a90 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -28,10 +28,18 @@ public class CacheFactoryBean implements java.io.Serializable { @PostConstruct public void init() { if (hazelcastInstance == null) { - Config hazelcastConfig = new Config(); - hazelcastConfig.setClusterName("dataverse"); - hazelcastConfig.getJetConfig().setEnabled(true); - hazelcastInstance = Hazelcast.newHazelcastInstance(hazelcastConfig); + Config config = new Config(); + config.setClusterName("dataverse"); + config.getJetConfig().setEnabled(true); + + config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(true); + config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(false); + // .setProperty("tag-key", "my-ec2-instance-tag-key") + // .setProperty("tag-value", "my-ec2-instance-tag-value"); + + + hazelcastInstance = Hazelcast.newHazelcastInstance(config); rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } } From c84ae145945dedb368dfa7fbfdad318c0a176ea0 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 30 Jan 2024 13:23:11 -0500 Subject: [PATCH 0865/1112] fixing unit tests --- docker-compose-dev.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index b4a7a510839..0c29813f03b 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -59,6 +59,8 @@ services: - "4949:4848" # HTTPS (Payara Admin Console) - "9009:9009" # JDWP - "8686:8686" # JMX + - "5701:5701" # Hazelcast + - "5702:5702" # Hazelcast networks: - dataverse depends_on: From 4f8a39c3f98868c7e07e0eff78c62dec05d4cfd7 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 30 Jan 2024 15:52:57 -0500 Subject: [PATCH 0866/1112] fixing unit tests --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBean.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 94fe6cd2a90..a3bcc1ae64a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -28,17 +28,18 @@ public class CacheFactoryBean implements java.io.Serializable { @PostConstruct public void init() { if (hazelcastInstance == null) { + // TODO: move config to a file (yml) Config config = new Config(); config.setClusterName("dataverse"); config.getJetConfig().setEnabled(true); - - config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(true); + config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); + config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5701"); + config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5702"); 
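As a side note, a self-contained sketch of the TCP/IP join being wired up here: two members configured with the same member list form one cluster and read the same named map. The ports and cluster name follow the patch; the class name is illustrative.

import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import java.util.Map;

public class HazelcastTcpIpSketch {
    private static Config tcpIpConfig() {
        Config config = new Config();
        config.setClusterName("dataverse");
        config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false);
        config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true);
        config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5701");
        config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5702");
        return config;
    }

    public static void main(String[] args) {
        // Start two members on the same host; they discover each other via the listed addresses.
        HazelcastInstance a = Hazelcast.newHazelcastInstance(tcpIpConfig());
        HazelcastInstance b = Hazelcast.newHazelcastInstance(tcpIpConfig());
        Map<String, String> cacheA = a.getMap("rateLimitCache");
        Map<String, String> cacheB = b.getMap("rateLimitCache");
        cacheA.put("key1", "value1");
        System.out.println("seen from the second member: " + cacheB.get("key1")); // prints value1
        b.shutdown();
        a.shutdown();
    }
}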
config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(false); config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(false); // .setProperty("tag-key", "my-ec2-instance-tag-key") // .setProperty("tag-value", "my-ec2-instance-tag-value"); - - hazelcastInstance = Hazelcast.newHazelcastInstance(config); rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } From 3d0e4383ba8664426ed86fa66c14560d01d72a9b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 31 Jan 2024 11:58:45 -0500 Subject: [PATCH 0867/1112] fix test hazelcast config --- scripts/installer/as-setup.sh | 4 ++ .../iq/dataverse/cache/CacheFactoryBean.java | 52 +++++++++++++------ 2 files changed, 41 insertions(+), 15 deletions(-) diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index c89bcb4ff4d..3eb81f553e7 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -128,6 +128,10 @@ function preliminary_setup() # so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) ./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + + # set up rate limiting using hazelcast in TcpIp discovery mode + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.join=TcpIp" + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.members=localhost:5701,localhost:5702" } function final_setup(){ diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index a3bcc1ae64a..b7ec7f6736c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -11,6 +11,7 @@ import jakarta.ejb.Singleton; import jakarta.ejb.Startup; +import java.util.Arrays; import java.util.logging.Logger; import java.util.Map; @@ -22,25 +23,14 @@ public class CacheFactoryBean implements java.io.Serializable { private Map rateLimitCache; @EJB SystemConfig systemConfig; - public final static String RATE_LIMIT_CACHE = "rateLimitCache"; - + public enum JoinVia { + Multicast, TcpIp, AWS, Azure; + } @PostConstruct public void init() { if (hazelcastInstance == null) { - // TODO: move config to a file (yml) - Config config = new Config(); - config.setClusterName("dataverse"); - config.getJetConfig().setEnabled(true); - config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false); - config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); - config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5701"); - config.getNetworkConfig().getJoin().getTcpIpConfig().addMember("localhost:5702"); - config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(false); - config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(false); - // .setProperty("tag-key", "my-ec2-instance-tag-key") - // .setProperty("tag-value", "my-ec2-instance-tag-value"); - hazelcastInstance = Hazelcast.newHazelcastInstance(config); + hazelcastInstance = Hazelcast.newHazelcastInstance(getConfig()); rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } } @@ -113,4 +103,36 @@ public void clearCache(String cacheName) { break; } } + + private Config getConfig() { + JoinVia joinVia; + try { + String join = System.getProperty("dataverse.hazelcast.join", "Multicast"); + joinVia = JoinVia.valueOf(join); + } catch (IllegalArgumentException e) { + logger.warning("dataverse.hazelcast.join must be one of " + 
JoinVia.values() + ". Defaulting to Multicast"); + joinVia = JoinVia.Multicast; + } + Config config = new Config(); + config.setClusterName("dataverse"); + config.getJetConfig().setEnabled(true); + if (joinVia == JoinVia.TcpIp) { + config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); + String members = System.getProperty("dataverse.hazelcast.members", ""); + logger.info("dataverse.hazelcast.members: " + members); + try { + Arrays.stream(members.split(",")).forEach(m -> + config.getNetworkConfig().getJoin().getTcpIpConfig().addMember(m)); + } catch (IllegalArgumentException e) { + logger.warning("dataverse.hazelcast.members must contain at least 1 'host:port' entry, Defaulting to Multicast"); + joinVia = JoinVia.Multicast; + } + } + logger.info("dataverse.hazelcast.join:" + joinVia); + config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(joinVia == JoinVia.Multicast); + config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(joinVia == JoinVia.TcpIp); + config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(joinVia == JoinVia.AWS); + config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(joinVia == JoinVia.Azure); + return config; + } } From 176adbc778976e3606db32830dd4e40c23091c53 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 31 Jan 2024 14:07:16 -0500 Subject: [PATCH 0868/1112] fix test hazelcast config --- .../harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 6815d8b872b..9034d8c00b4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -29,6 +30,7 @@ public class CacheFactoryBeanTest { AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); String action; + static final String staticHazelcastSystemProperties = "dataverse.hazelcast."; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -72,8 +74,13 @@ public class CacheFactoryBeanTest { " ]\n" + "}"; + @BeforeAll + public static void setup() { + System.setProperty(staticHazelcastSystemProperties + "join", "TcpIp"); + System.setProperty(staticHazelcastSystemProperties + "members", "localhost:5701,localhost:5702"); + } @BeforeEach - public void setup() throws IOException { + public void init() throws IOException { // reuse cache and config for all tests if (cache == null) { mockedSystemConfig = mock(SystemConfig.class); From 403dc084cec57157a347eb4a83aca78d5eeab95a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 31 Jan 2024 15:08:05 -0500 Subject: [PATCH 0869/1112] fix test hazelcast config --- doc/release-notes/9356-rate-limiting.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index d7b9d2defcf..8bae4b59de4 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -16,3 +16,10 @@ Tiers not specified in 
this setting will default to `-1` (No Limit). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. `curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}'` + +JVM properties to configure Hazelcast to work as a cluster. +By default, Hazelcast uses Multicast to discover cluster members see https://docs.hazelcast.com/imdg/4.2/clusters/discovery-mechanisms +Valid join types: Multicast or TcpIp +Members can be listed in a CSV field of 'host:port' for each dataverse app instance +-Ddataverse.hazelcast.join=TcpIp +-Ddataverse.hazelcast.members=localhost:5701,localhost:5702 \ No newline at end of file From 9e43b25d67ab0726b055b4bc816e4844b0da9fa0 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 31 Jan 2024 15:58:19 -0500 Subject: [PATCH 0870/1112] fix test hazelcast config --- .../java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 1e676adfe03..446a8b4b712 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -22,8 +22,8 @@ public class RateLimitUtil { private static final Logger logger = Logger.getLogger(RateLimitUtil.class.getCanonicalName()); - protected static final List rateLimits = new CopyOnWriteArrayList<>(); - protected static final Map rateLimitMap = new ConcurrentHashMap<>(); + static final List rateLimits = new CopyOnWriteArrayList<>(); + static final Map rateLimitMap = new ConcurrentHashMap<>(); private static final Gson gson = new Gson(); public static final int NO_LIMIT = -1; From 0771fae20d7d394fa3b2b1a03b350d925127c883 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 1 Feb 2024 11:50:45 -0500 Subject: [PATCH 0871/1112] fixing more review comments --- .../iq/dataverse/cache/RateLimitUtil.java | 43 +++++++++++-------- .../iq/dataverse/util/SystemConfig.java | 19 +------- .../dataverse/cache/CacheFactoryBeanTest.java | 8 +--- .../iq/dataverse/cache/RateLimitUtilTest.java | 18 +++++--- 4 files changed, 41 insertions(+), 47 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index 446a8b4b712..b710138865f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -4,7 +4,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.json.Json; import jakarta.json.JsonArray; @@ -27,11 +26,7 @@ public class RateLimitUtil { private static final Gson gson = new Gson(); public static final int NO_LIMIT = -1; - protected static int getCapacityByTier(SystemConfig systemConfig, int tier) { - return systemConfig.getIntFromCSVStringOrDefault(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, tier, NO_LIMIT); - } - - protected static String generateCacheKey(final User user, final String action) { + static String generateCacheKey(final User user, final String action) { StringBuffer id = new StringBuffer(); id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); if (action != null) { @@ -39,7 +34,7 @@ protected static String generateCacheKey(final User user, final String action) { } return id.toString(); } - protected static int getCapacity(SystemConfig systemConfig, User user, String action) { + static int getCapacity(SystemConfig systemConfig, User user, String action) { if (user != null && user.isSuperuser()) { return NO_LIMIT; }; @@ -48,7 +43,7 @@ protected static int getCapacity(SystemConfig systemConfig, User user, String ac getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : getCapacityByTierAndAction(systemConfig, 0, action); } - protected static boolean rateLimited(final Map rateLimitCache, final String key, int capacityPerHour) { + static boolean rateLimited(final Map rateLimitCache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -70,7 +65,7 @@ protected static boolean rateLimited(final Map rateLimitCache, final String key, return tokens < 1; } - protected static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, String action) { + static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, String action) { if (rateLimits.isEmpty()) { init(systemConfig); } @@ -79,8 +74,22 @@ protected static int getCapacityByTierAndAction(SystemConfig systemConfig, Integ rateLimitMap.containsKey(getMapKey(tier)) ? 
rateLimitMap.get(getMapKey(tier)) : getCapacityByTier(systemConfig, tier); } - - private static void init(SystemConfig systemConfig) { + static int getCapacityByTier(SystemConfig systemConfig, int tier) { + int value = NO_LIMIT; + String csvString = systemConfig.getRateLimitingDefaultCapacityTiers(); + try { + if (!csvString.isEmpty()) { + int[] values = Arrays.stream(csvString.split(",")).mapToInt(Integer::parseInt).toArray(); + if (tier < values.length) { + value = values[tier]; + } + } + } catch (NumberFormatException nfe) { + logger.warning(nfe.getMessage()); + } + return value; + } + static void init(SystemConfig systemConfig) { getRateLimitsFromJson(systemConfig); /* Convert the List of Rate Limit Settings containing a list of Actions to a fast lookup Map where the key is: for default if no action defined: "{tier}:" and the value is the default limit for the tier @@ -92,8 +101,7 @@ private static void init(SystemConfig systemConfig) { r.getActions().forEach(a -> rateLimitMap.put(getMapKey(r.getTier(), a), r.getLimitPerHour())); }); } - - private static void getRateLimitsFromJson(SystemConfig systemConfig) { + static void getRateLimitsFromJson(SystemConfig systemConfig) { String setting = systemConfig.getRateLimitsJson(); if (!setting.isEmpty() && rateLimits.isEmpty()) { try { @@ -108,12 +116,10 @@ private static void getRateLimitsFromJson(SystemConfig systemConfig) { } } } - - private static String getMapKey(int tier) { + static String getMapKey(int tier) { return getMapKey(tier, null); } - - private static String getMapKey(int tier, String action) { + static String getMapKey(int tier, String action) { StringBuffer key = new StringBuffer(); key.append(tier).append(":"); if (action != null) { @@ -121,8 +127,7 @@ private static String getMapKey(int tier, String action) { } return key.toString(); } - - private static long longFromKey(Map cache, String key) { + static long longFromKey(Map cache, String key) { Object l = cache.get(key); return l != null ? 
Long.parseLong(String.valueOf(l)) : 0L; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 9f4bd7c2e62..b388e978808 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1160,22 +1160,7 @@ public boolean isStoringIngestedFilesWithHeaders() { public String getRateLimitsJson() { return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingCapacityByTierAndAction, ""); } - - public int getIntFromCSVStringOrDefault(final SettingsServiceBean.Key settingKey, int index, int defaultValue) { - int value = defaultValue; - if (settingKey != null && !settingKey.equals("")) { - String csv = settingsService.getValueForKey(settingKey, ""); - try { - if (!csv.isEmpty()) { - int[] values = Arrays.stream(csv.split(",")).mapToInt(Integer::parseInt).toArray(); - if (index < values.length) { - value = values[index]; - } - } - } catch (NumberFormatException nfe) { - logger.warning(nfe.getMessage()); - } - } - return value; + public String getRateLimitingDefaultCapacityTiers() { + return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, ""); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 9034d8c00b4..1b4c7e973af 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -17,7 +16,6 @@ import java.util.UUID; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -31,6 +29,7 @@ public class CacheFactoryBeanTest { GuestUser guestUser = GuestUser.get(); String action; static final String staticHazelcastSystemProperties = "dataverse.hazelcast."; + static final String settingDefaultCapacity = "30,60,120"; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + " {\n" + @@ -84,10 +83,7 @@ public void init() throws IOException { // reuse cache and config for all tests if (cache == null) { mockedSystemConfig = mock(SystemConfig.class); - doReturn(30).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); - doReturn(60).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); - doReturn(120).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); - doReturn(RateLimitUtil.NO_LIMIT).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(3), eq(RateLimitUtil.NO_LIMIT)); + doReturn(settingDefaultCapacity).when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); cache = new 
CacheFactoryBean(); cache.systemConfig = mockedSystemConfig; diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java index a7825481ade..033f9dbb67e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -13,7 +12,6 @@ import org.mockito.quality.Strictness; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; @ExtendWith(MockitoExtension.class) @@ -69,9 +67,7 @@ public class RateLimitUtilTest { @BeforeEach public void setup() { mockedSystemConfig = mock(SystemConfig.class); - doReturn(100).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(0), eq(RateLimitUtil.NO_LIMIT)); - doReturn(200).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(1), eq(RateLimitUtil.NO_LIMIT)); - doReturn(RateLimitUtil.NO_LIMIT).when(mockedSystemConfig).getIntFromCSVStringOrDefault(eq(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers),eq(2), eq(RateLimitUtil.NO_LIMIT)); + doReturn("100,200").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); // clear the static data so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); @@ -115,5 +111,17 @@ public void testGetCapacity() { assertEquals(30, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); authUser.setSuperuser(true); assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); + + // no setting means rate limiting is not on + doReturn("").when(mockedSystemConfig).getRateLimitsJson(); + doReturn("").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); + RateLimitUtil.rateLimitMap.clear(); + RateLimitUtil.rateLimits.clear(); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "xyz")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "abc")); + authUser.setRateLimitTier(99); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "def")); } } From 252337a8373aeff6e803de2f54ec430f42e2c912 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 1 Feb 2024 14:21:58 -0500 Subject: [PATCH 0872/1112] fix db rate limit tier column --- src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java | 2 +- .../iq/dataverse/authorization/users/AuthenticatedUser.java | 5 ++++- .../db/migration/V6.1.0.2__9356-add-rate-limiting.sql | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index 47aebb78a35..d63fcfa3e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -147,7 +147,7 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues, user.setMutedEmails(Type.tokenizeToSet((String) dbRowValues[15])); user.setMutedNotifications(Type.tokenizeToSet((String) dbRowValues[15])); - user.setRateLimitTier((int)dbRowValues[16]); + user.setRateLimitTier((int)dbRowValues[17]); user.setRoles(roles); return user; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 0ed036afc6b..6abcb350222 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -16,6 +16,8 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; +import static java.lang.Math.max; + import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; import java.sql.Timestamp; @@ -146,19 +148,20 @@ public class AuthenticatedUser implements User, Serializable { @Transient private Set mutedNotificationsSet = new HashSet<>(); - @Column private int rateLimitTier; @PrePersist void prePersist() { mutedNotifications = Type.toStringValue(mutedNotificationsSet); mutedEmails = Type.toStringValue(mutedEmailsSet); + rateLimitTier = max(1,rateLimitTier); // db column defaults to 1 (minimum value for a tier). } @PostLoad public void initialize() { mutedNotificationsSet = Type.tokenizeToSet(mutedNotifications); mutedEmailsSet = Type.tokenizeToSet(mutedEmails); + rateLimitTier = max(1,rateLimitTier); // db column defaults to 1 (minimum value for a tier). 
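For illustration, the lifecycle-callback pattern used above in isolation: clamp the tier both before INSERT and after SELECT so the entity never carries a value below the column default. The entity and method names are made up for the sketch.

import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import jakarta.persistence.PostLoad;
import jakarta.persistence.PrePersist;

@Entity
public class ExampleTieredUser {
    @Id
    @GeneratedValue
    private Long id;

    private int rateLimitTier;

    @PrePersist
    void normalizeBeforeInsert() {
        rateLimitTier = Math.max(1, rateLimitTier); // never persist a tier below the DB default of 1
    }

    @PostLoad
    void normalizeAfterLoad() {
        rateLimitTier = Math.max(1, rateLimitTier); // guard against existing rows that still hold 0
    }

    public int getRateLimitTier() { return rateLimitTier; }
    public void setRateLimitTier(int tier) { this.rateLimitTier = tier; }
}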
} /** diff --git a/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql index ae30fd96bfd..be370625b3f 100644 --- a/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql +++ b/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql @@ -1 +1,2 @@ -ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS ratelimittier int DEFAULT 1; \ No newline at end of file +ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS ratelimittier int DEFAULT 1; +UPDATE authenticateduser set ratelimittier = 1 WHERE ratelimittier = 0; \ No newline at end of file From cc70ba7f1886a7f9dc62470706e0e0df5ebf5fdd Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 1 Feb 2024 14:37:51 -0500 Subject: [PATCH 0873/1112] fix db rate limit tier column --- scripts/installer/installAppServer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py index 03abc03b05e..698f5ba9a58 100644 --- a/scripts/installer/installAppServer.py +++ b/scripts/installer/installAppServer.py @@ -29,6 +29,7 @@ def runAsadminScript(config): os.environ['DOI_USERNAME'] = config.get('doi','DOI_USERNAME') os.environ['DOI_PASSWORD'] = config.get('doi','DOI_PASSWORD') os.environ['DOI_DATACITERESTAPIURL'] = config.get('doi','DOI_DATACITERESTAPIURL') + mailServerEntry = config.get('system','MAIL_SERVER') try: From 794f0243f4c4bf100e90095c189df48c15bffb36 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 1 Feb 2024 14:40:58 -0500 Subject: [PATCH 0874/1112] fix db rate limit tier column --- .../resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql index be370625b3f..470483e2bf4 100644 --- a/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql +++ b/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql @@ -1,2 +1 @@ ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS ratelimittier int DEFAULT 1; -UPDATE authenticateduser set ratelimittier = 1 WHERE ratelimittier = 0; \ No newline at end of file From 605097c1dd49a9526a3183cee6c05db154d33378 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 10:20:45 -0500 Subject: [PATCH 0875/1112] getting tests to pass on Jenkins --- doc/release-notes/9356-rate-limiting.md | 7 +++++-- pom.xml | 4 ++-- .../iq/dataverse/cache/CacheFactoryBean.java | 8 ++++---- .../dataverse/cache/CacheFactoryBeanTest.java | 19 +++++++++++++++---- 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 8bae4b59de4..098b20a20aa 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -19,7 +19,10 @@ In the following example, calls made by a guest user (tier 0) for API `GetLatest JVM properties to configure Hazelcast to work as a cluster. 
By default, Hazelcast uses Multicast to discover cluster members see https://docs.hazelcast.com/imdg/4.2/clusters/discovery-mechanisms -Valid join types: Multicast or TcpIp -Members can be listed in a CSV field of 'host:port' for each dataverse app instance +and the cluster name defaults to 'dataverse' +Cluster name can be configured using +-Ddataverse.hazelcast.cluster=dataverse-test +Valid join types: Multicast, TcpIp, AWS, or Azure +TcpIp member IPs can be listed in a CSV field of 'host:port' for each dataverse app instance -Ddataverse.hazelcast.join=TcpIp -Ddataverse.hazelcast.members=localhost:5701,localhost:5702 \ No newline at end of file diff --git a/pom.xml b/pom.xml index 4a2bc13dbc7..a90f76e2034 100644 --- a/pom.xml +++ b/pom.xml @@ -549,8 +549,8 @@ com.hazelcast - hazelcast - 5.3.6 + hazelcast-all + 4.0.2 xerces diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index b7ec7f6736c..d060b191c36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -30,7 +30,7 @@ public enum JoinVia { @PostConstruct public void init() { if (hazelcastInstance == null) { - hazelcastInstance = Hazelcast.newHazelcastInstance(getConfig()); + hazelcastInstance = Hazelcast.newHazelcastInstance(getHazelcastConfig()); rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); } } @@ -104,7 +104,7 @@ public void clearCache(String cacheName) { } } - private Config getConfig() { + private Config getHazelcastConfig() { JoinVia joinVia; try { String join = System.getProperty("dataverse.hazelcast.join", "Multicast"); @@ -114,8 +114,8 @@ private Config getConfig() { joinVia = JoinVia.Multicast; } Config config = new Config(); - config.setClusterName("dataverse"); - config.getJetConfig().setEnabled(true); + String clusterName = System.getProperty("dataverse.hazelcast.cluster", "dataverse"); + config.setClusterName(clusterName); if (joinVia == JoinVia.TcpIp) { config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); String members = System.getProperty("dataverse.hazelcast.members", ""); diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 1b4c7e973af..41e4c556312 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -25,6 +25,8 @@ public class CacheFactoryBeanTest { private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; + // Second instance for cluster testing + static CacheFactoryBean cache2 = null; AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); String action; @@ -75,8 +77,14 @@ public class CacheFactoryBeanTest { @BeforeAll public static void setup() { - System.setProperty(staticHazelcastSystemProperties + "join", "TcpIp"); - System.setProperty(staticHazelcastSystemProperties + "members", "localhost:5701,localhost:5702"); + System.setProperty(staticHazelcastSystemProperties + "cluster", "dataverse-test"); + if (System.getenv("JENKINS_HOME") != null) { + System.setProperty(staticHazelcastSystemProperties + "join", "AWS"); + } else { + System.setProperty(staticHazelcastSystemProperties + "join", "Multicast"); + } + //System.setProperty(staticHazelcastSystemProperties + "join", "TcpIp"); + 
//System.setProperty(staticHazelcastSystemProperties + "members", "localhost:5701,localhost:5702"); } @BeforeEach public void init() throws IOException { @@ -118,6 +126,10 @@ public static void cleanup() { cache.cleanup(); // PreDestroy - shutdown Hazelcast cache = null; } + if (cache2 != null) { + cache2.cleanup(); // PreDestroy - shutdown Hazelcast + cache2 = null; + } } @Test public void testGuestUserGettingRateLimited() { @@ -187,7 +199,7 @@ public void testCluster() { cache.checkRate(authUser, action); // create a second cache to test cluster - CacheFactoryBean cache2 = new CacheFactoryBean(); + cache2 = new CacheFactoryBean(); cache2.systemConfig = mockedSystemConfig; cache2.init(); // PostConstruct - start Hazelcast @@ -211,6 +223,5 @@ public void testCluster() { assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); cache2.cleanup(); // remove cache2 assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); - } } From 879bc5cf4703b8ce4854a4dd1d43f47f268f3922 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 11:22:33 -0500 Subject: [PATCH 0876/1112] testing in jenkins --- scripts/installer/as-setup.sh | 3 +-- .../harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 7 +++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index 3eb81f553e7..94f088cc9df 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -130,8 +130,7 @@ function preliminary_setup() ./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector # set up rate limiting using hazelcast in TcpIp discovery mode - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.join=TcpIp" - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.members=localhost:5701,localhost:5702" + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.join=Multicast" } function final_setup(){ diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 41e4c556312..be967ec23cc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -78,10 +78,9 @@ public class CacheFactoryBeanTest { @BeforeAll public static void setup() { System.setProperty(staticHazelcastSystemProperties + "cluster", "dataverse-test"); + System.setProperty(staticHazelcastSystemProperties + "join", "Multicast"); if (System.getenv("JENKINS_HOME") != null) { - System.setProperty(staticHazelcastSystemProperties + "join", "AWS"); - } else { - System.setProperty(staticHazelcastSystemProperties + "join", "Multicast"); + // System.setProperty(staticHazelcastSystemProperties + "join", "AWS"); } //System.setProperty(staticHazelcastSystemProperties + "join", "TcpIp"); //System.setProperty(staticHazelcastSystemProperties + "members", "localhost:5701,localhost:5702"); @@ -97,7 +96,7 @@ public void init() throws IOException { cache.systemConfig = mockedSystemConfig; cache.init(); // PostConstruct - start Hazelcast - // clear the static data so it can be reloaded with the new mocked data + // clear the static data, so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); From 27cce94b4fa502b67533bfdda3b7750fbbca9691 Mon Sep 17 00:00:00 2001 From: 
Steven Winship Date: Fri, 2 Feb 2024 13:35:24 -0500 Subject: [PATCH 0877/1112] use payara instance of hazelcast --- doc/release-notes/9356-rate-limiting.md | 10 +-- docker-compose-dev.yml | 2 - scripts/installer/as-setup.sh | 3 - .../iq/dataverse/cache/CacheFactoryBean.java | 63 +++---------------- .../dataverse/cache/CacheFactoryBeanTest.java | 35 +++++------ 5 files changed, 23 insertions(+), 90 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 098b20a20aa..3281e80beed 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -17,12 +17,4 @@ This allows for more control over the rate limit of individual API command calls In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. `curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}'` -JVM properties to configure Hazelcast to work as a cluster. 
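Condensed, the pattern this commit adopts looks roughly like the sketch below: let Payara own the Hazelcast lifecycle and inject its instance rather than calling Hazelcast.newHazelcastInstance() in application code. The bean and map names are illustrative.

import com.hazelcast.core.HazelcastInstance;
import jakarta.annotation.PostConstruct;
import jakarta.ejb.Singleton;
import jakarta.ejb.Startup;
import jakarta.inject.Inject;
import java.util.Map;

@Singleton
@Startup
public class InjectedCacheSketch {
    @Inject
    HazelcastInstance hzInstance; // Payara's embedded Hazelcast member, provided by the container

    private Map<String, String> rateLimitCache;

    @PostConstruct
    void init() {
        // The named map is shared by every Payara instance in the same Hazelcast cluster.
        rateLimitCache = hzInstance.getMap("rateLimitCache");
    }

    public Map<String, String> cache() {
        return rateLimitCache;
    }
}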
-By default, Hazelcast uses Multicast to discover cluster members see https://docs.hazelcast.com/imdg/4.2/clusters/discovery-mechanisms -and the cluster name defaults to 'dataverse' -Cluster name can be configured using --Ddataverse.hazelcast.cluster=dataverse-test -Valid join types: Multicast, TcpIp, AWS, or Azure -TcpIp member IPs can be listed in a CSV field of 'host:port' for each dataverse app instance --Ddataverse.hazelcast.join=TcpIp --Ddataverse.hazelcast.members=localhost:5701,localhost:5702 \ No newline at end of file +Hazelcast is configured in Payara and should not need any changes for this feature \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 0c29813f03b..b4a7a510839 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -59,8 +59,6 @@ services: - "4949:4848" # HTTPS (Payara Admin Console) - "9009:9009" # JDWP - "8686:8686" # JMX - - "5701:5701" # Hazelcast - - "5702:5702" # Hazelcast networks: - dataverse depends_on: diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index 94f088cc9df..c89bcb4ff4d 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -128,9 +128,6 @@ function preliminary_setup() # so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) ./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector - - # set up rate limiting using hazelcast in TcpIp discovery mode - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.hazelcast.join=Multicast" } function final_setup(){ diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index d060b191c36..d3837ea8c9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -1,17 +1,14 @@ package edu.harvard.iq.dataverse.cache; -import com.hazelcast.config.Config; -import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstance; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; import jakarta.ejb.EJB; import jakarta.ejb.Singleton; import jakarta.ejb.Startup; +import jakarta.inject.Inject; -import java.util.Arrays; import java.util.logging.Logger; import java.util.Map; @@ -19,32 +16,18 @@ @Startup public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); - private HazelcastInstance hazelcastInstance = null; private Map rateLimitCache; @EJB SystemConfig systemConfig; + @Inject + HazelcastInstance hzInstance; public final static String RATE_LIMIT_CACHE = "rateLimitCache"; - public enum JoinVia { - Multicast, TcpIp, AWS, Azure; - } + @PostConstruct public void init() { - if (hazelcastInstance == null) { - hazelcastInstance = Hazelcast.newHazelcastInstance(getHazelcastConfig()); - rateLimitCache = hazelcastInstance.getMap(RATE_LIMIT_CACHE); - } - } - @PreDestroy - protected void cleanup() { - if (hazelcastInstance != null) { - hazelcastInstance.shutdown(); - hazelcastInstance = null; - } - } - @Override - protected void finalize() throws Throwable { - cleanup(); - super.finalize(); + logger.info("Hazelcast member:" + hzInstance.getCluster().getLocalMember()); + rateLimitCache = 
hzInstance.getMap(RATE_LIMIT_CACHE); + logger.info("Rate Limit Cache Size: " + rateLimitCache.size()); } /** @@ -103,36 +86,4 @@ public void clearCache(String cacheName) { break; } } - - private Config getHazelcastConfig() { - JoinVia joinVia; - try { - String join = System.getProperty("dataverse.hazelcast.join", "Multicast"); - joinVia = JoinVia.valueOf(join); - } catch (IllegalArgumentException e) { - logger.warning("dataverse.hazelcast.join must be one of " + JoinVia.values() + ". Defaulting to Multicast"); - joinVia = JoinVia.Multicast; - } - Config config = new Config(); - String clusterName = System.getProperty("dataverse.hazelcast.cluster", "dataverse"); - config.setClusterName(clusterName); - if (joinVia == JoinVia.TcpIp) { - config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); - String members = System.getProperty("dataverse.hazelcast.members", ""); - logger.info("dataverse.hazelcast.members: " + members); - try { - Arrays.stream(members.split(",")).forEach(m -> - config.getNetworkConfig().getJoin().getTcpIpConfig().addMember(m)); - } catch (IllegalArgumentException e) { - logger.warning("dataverse.hazelcast.members must contain at least 1 'host:port' entry, Defaulting to Multicast"); - joinVia = JoinVia.Multicast; - } - } - logger.info("dataverse.hazelcast.join:" + joinVia); - config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(joinVia == JoinVia.Multicast); - config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(joinVia == JoinVia.TcpIp); - config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(joinVia == JoinVia.AWS); - config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(joinVia == JoinVia.Azure); - return config; - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index be967ec23cc..5063269695d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -1,10 +1,11 @@ package edu.harvard.iq.dataverse.cache; +import com.hazelcast.config.Config; +import com.hazelcast.core.*; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -75,16 +76,6 @@ public class CacheFactoryBeanTest { " ]\n" + "}"; - @BeforeAll - public static void setup() { - System.setProperty(staticHazelcastSystemProperties + "cluster", "dataverse-test"); - System.setProperty(staticHazelcastSystemProperties + "join", "Multicast"); - if (System.getenv("JENKINS_HOME") != null) { - // System.setProperty(staticHazelcastSystemProperties + "join", "AWS"); - } - //System.setProperty(staticHazelcastSystemProperties + "join", "TcpIp"); - //System.setProperty(staticHazelcastSystemProperties + "members", "localhost:5701,localhost:5702"); - } @BeforeEach public void init() throws IOException { // reuse cache and config for all tests @@ -94,7 +85,10 @@ public void init() throws IOException { doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); cache = new CacheFactoryBean(); cache.systemConfig = mockedSystemConfig; - cache.init(); // PostConstruct - start Hazelcast + if (cache.hzInstance == null) { + 
cache.hzInstance = Hazelcast.newHazelcastInstance(new Config()); + } + cache.init(); // PostConstruct - set up Hazelcast // clear the static data, so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); @@ -121,13 +115,11 @@ public void init() throws IOException { @AfterAll public static void cleanup() { - if (cache != null) { - cache.cleanup(); // PreDestroy - shutdown Hazelcast - cache = null; + if (cache != null && cache.hzInstance != null) { + cache.hzInstance.shutdown(); } - if (cache2 != null) { - cache2.cleanup(); // PreDestroy - shutdown Hazelcast - cache2 = null; + if (cache2 != null && cache2.hzInstance != null) { + cache2.hzInstance.shutdown(); } } @Test @@ -200,7 +192,10 @@ public void testCluster() { // create a second cache to test cluster cache2 = new CacheFactoryBean(); cache2.systemConfig = mockedSystemConfig; - cache2.init(); // PostConstruct - start Hazelcast + if (cache2.hzInstance == null) { + cache2.hzInstance = Hazelcast.newHazelcastInstance(new Config()); + } + cache2.init(); // PostConstruct - set up Hazelcast // check to see if the new cache synced with the existing cache long s1 = cache.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); @@ -220,7 +215,7 @@ public void testCluster() { cache2.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); - cache2.cleanup(); // remove cache2 + cache2.hzInstance.shutdown(); // remove cache2 assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); } } From 9784416fe7670b78911db534e592e32f8b42d692 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 15:59:23 -0500 Subject: [PATCH 0878/1112] fixes for Jenkins --- .../dataverse/cache/CacheFactoryBeanTest.java | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 5063269695d..73e521c810c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.cache; +import com.hazelcast.cluster.Address; import com.hazelcast.config.Config; import com.hazelcast.core.*; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -78,7 +79,7 @@ public class CacheFactoryBeanTest { @BeforeEach public void init() throws IOException { - // reuse cache and config for all tests + // Reuse cache and config for all tests if (cache == null) { mockedSystemConfig = mock(SystemConfig.class); doReturn(settingDefaultCapacity).when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); @@ -90,11 +91,11 @@ public void init() throws IOException { } cache.init(); // PostConstruct - set up Hazelcast - // clear the static data, so it can be reloaded with the new mocked data + // Clear the static data, so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); - // testing cache implementation and code coverage + // Testing cache implementation and code coverage final String cacheKey = "CacheTestKey" + UUID.randomUUID(); final String cacheValue = "CacheTestValue" + UUID.randomUUID(); long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); @@ -104,12 +105,12 @@ 
public void init() throws IOException { assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); } - // reset to default auth user + // Reset to default auth user authUser.setRateLimitTier(1); authUser.setSuperuser(false); authUser.setUserIdentifier("authUser"); - // create a unique action for each test + // Create a unique action for each test action = "cmd-" + UUID.randomUUID(); } @@ -165,7 +166,7 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio assertTrue(rateLimited && cnt == 120, "rateLimited:"+rateLimited + " cnt:"+cnt); for (cnt = 0; cnt <60; cnt++) { - Thread.sleep(1000);// wait for bucket to be replenished (check each second for 1 minute max) + Thread.sleep(1000);// Wait for bucket to be replenished (check each second for 1 minute max) rateLimited = !cache.checkRate(authUser, action); if (!rateLimited) { break; @@ -186,36 +187,45 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio @Test public void testCluster() { - //make sure at least 1 entry is in the original cache + // Make sure at least 1 entry is in the original cache cache.checkRate(authUser, action); - // create a second cache to test cluster + // Create a second cache to test cluster cache2 = new CacheFactoryBean(); cache2.systemConfig = mockedSystemConfig; if (cache2.hzInstance == null) { cache2.hzInstance = Hazelcast.newHazelcastInstance(new Config()); + + // Needed for Jenkins to form cluster based on TcpIp since Multicast fails + Address m1 = cache.hzInstance.getCluster().getLocalMember().getAddress(); + Address m2 = cache2.hzInstance.getCluster().getLocalMember().getAddress(); + String members = String.format("%s:%d,%s:%d", m1.getHost(),m1.getPort(),m2.getHost(),m2.getPort()); + cache.hzInstance.getConfig().getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true).addMember(members); + cache2.hzInstance.getConfig().getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true).addMember(members); } cache2.init(); // PostConstruct - set up Hazelcast - // check to see if the new cache synced with the existing cache + // Check to see if the new cache synced with the existing cache long s1 = cache.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); long s2 = cache2.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); assertTrue(s1 > 0 && s1 == s2, "Size1:" + s1 + " Size2:" + s2 ); String key = "key1"; String value = "value1"; - // verify that both caches stay in sync + // Verify that both caches stay in sync cache.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); - // clearing one cache also clears the other cache in the cluster + // Clearing one cache also clears the other cache in the cluster cache2.clearCache(CacheFactoryBean.RATE_LIMIT_CACHE); assertTrue(String.valueOf(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key)).isEmpty()); - // verify no issue dropping one node from cluster + // Verify no issue dropping one node from cluster cache2.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); - cache2.hzInstance.shutdown(); // remove cache2 + // Shut down hazelcast on cache2 and make sure data is still available in original cache + cache2.hzInstance.shutdown(); + cache2 = null; 
assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); } } From 21b095176a9fbd5f15f632198934a760db950706 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 16:03:32 -0500 Subject: [PATCH 0879/1112] fixes for Jenkins --- docker-compose-dev.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index b4a7a510839..ae0aa2bdf76 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -232,6 +232,7 @@ services: MINIO_ROOT_USER: 4cc355_k3y MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k3y command: server /data + networks: dataverse: driver: bridge From 465c5d5901318da19638223bb035de49b9d6b99b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 17:04:05 -0500 Subject: [PATCH 0880/1112] fixes for Jenkins --- .../dataverse/cache/CacheFactoryBeanTest.java | 33 ++++++++++++++----- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 73e521c810c..96a9b58315f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.util.UUID; +import java.util.logging.Logger; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; @@ -24,7 +25,7 @@ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class CacheFactoryBeanTest { - + private static final Logger logger = Logger.getLogger(CacheFactoryBeanTest.class.getCanonicalName()); private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; // Second instance for cluster testing @@ -87,7 +88,7 @@ public void init() throws IOException { cache = new CacheFactoryBean(); cache.systemConfig = mockedSystemConfig; if (cache.hzInstance == null) { - cache.hzInstance = Hazelcast.newHazelcastInstance(new Config()); + cache.hzInstance = Hazelcast.newHazelcastInstance(getConfig()); } cache.init(); // PostConstruct - set up Hazelcast @@ -194,14 +195,11 @@ public void testCluster() { cache2 = new CacheFactoryBean(); cache2.systemConfig = mockedSystemConfig; if (cache2.hzInstance == null) { - cache2.hzInstance = Hazelcast.newHazelcastInstance(new Config()); - // Needed for Jenkins to form cluster based on TcpIp since Multicast fails - Address m1 = cache.hzInstance.getCluster().getLocalMember().getAddress(); - Address m2 = cache2.hzInstance.getCluster().getLocalMember().getAddress(); - String members = String.format("%s:%d,%s:%d", m1.getHost(),m1.getPort(),m2.getHost(),m2.getPort()); - cache.hzInstance.getConfig().getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true).addMember(members); - cache2.hzInstance.getConfig().getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true).addMember(members); + Address initialCache = cache.hzInstance.getCluster().getLocalMember().getAddress(); + String members = String.format("%s:%d", initialCache.getHost(),initialCache.getPort()); + logger.info("Switching to TcpIp mode with members: " + members); + cache2.hzInstance = Hazelcast.newHazelcastInstance(getConfig(members)); } cache2.init(); // PostConstruct - set up Hazelcast @@ -228,4 +226,21 @@ public void testCluster() { cache2 = null; assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); } + + private Config getConfig() { + 
return getConfig(null); + } + private Config getConfig(String members) { + Config config = new Config(); + config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getAwsConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getAzureConfig().setEnabled(false); + config.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true); + if (members != null) { + config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(true); + config.getNetworkConfig().getJoin().getTcpIpConfig().addMember(members); + } + return config; + } } From e5fe18fc3c454df194b82cdeb97513f23feb18f4 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 2 Feb 2024 17:08:52 -0500 Subject: [PATCH 0881/1112] fixes for Jenkins --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index a90f76e2034..4a2bc13dbc7 100644 --- a/pom.xml +++ b/pom.xml @@ -549,8 +549,8 @@ com.hazelcast - hazelcast-all - 4.0.2 + hazelcast + 5.3.6 xerces From 15ef82eb4241248864e0e411a545b9388ea9f004 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:14:42 -0500 Subject: [PATCH 0882/1112] Update pom.xml Co-authored-by: Oliver Bertuch --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4a2bc13dbc7..e229b35fd0a 100644 --- a/pom.xml +++ b/pom.xml @@ -550,7 +550,7 @@ com.hazelcast hazelcast - 5.3.6 + provided xerces From 77cede2bb9848037a5ded7ed8f20635bec1fd935 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:18:24 -0500 Subject: [PATCH 0883/1112] Update src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java Co-authored-by: Oliver Bertuch --- .../edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 96a9b58315f..fd05f216eb0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -33,7 +33,6 @@ public class CacheFactoryBeanTest { AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); String action; - static final String staticHazelcastSystemProperties = "dataverse.hazelcast."; static final String settingDefaultCapacity = "30,60,120"; static final String settingJson = "{\n" + " \"rateLimits\":[\n" + From 02cd0d03581443500366d2ff835e18e6f8d7c661 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:19:33 -0500 Subject: [PATCH 0884/1112] Update pom.xml Co-authored-by: Oliver Bertuch --- pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pom.xml b/pom.xml index e229b35fd0a..529d2fa35c3 100644 --- a/pom.xml +++ b/pom.xml @@ -542,11 +542,6 @@ dataverse-spi 2.0.0 - - javax.cache - cache-api - 1.1.1 - com.hazelcast hazelcast From 9b95e4db7f289087749e88c8760bc5f9e3cace49 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:20:07 -0500 Subject: [PATCH 0885/1112] Update src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java Co-authored-by: Oliver Bertuch --- 
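Aside (sits after the "---" separator, so it is ignored when the patch is applied): this patch only adds a comment noting that the map handed out by Hazelcast is a threadsafe ConcurrentMap, while the following patches in the series switch the bean over to the JCache API provided by Payara. A minimal standalone sketch of that JCache pattern is below; the provider lookup, the "rateLimitCache" name, and the "authUser:SomeCommand" key are illustrative assumptions for this note, not taken from the patches themselves (inside Payara the CacheManager would normally be injected rather than looked up).

    import javax.cache.Cache;
    import javax.cache.CacheManager;
    import javax.cache.Caching;
    import javax.cache.configuration.MutableConfiguration;

    public class RateLimitCacheSketch {
        public static void main(String[] args) {
            // Resolve whichever JCache provider is on the classpath (Hazelcast, in Payara's case).
            CacheManager manager = Caching.getCachingProvider().getCacheManager();

            // Typed String -> String configuration, mirroring the per-user/per-action counters.
            MutableConfiguration<String, String> cfg =
                    new MutableConfiguration<String, String>().setTypes(String.class, String.class);
            Cache<String, String> cache = manager.createCache("rateLimitCache", cfg);

            // The cache is safe for concurrent use; putIfAbsent seeds a counter only once.
            cache.putIfAbsent("authUser:SomeCommand", "0");
            System.out.println(cache.get("authUser:SomeCommand"));
        }
    }
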
.../java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index d3837ea8c9e..4282a77b6af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -16,6 +16,7 @@ @Startup public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); + // Retrieved from Hazelcast, implements ConcurrentMap and is threadsafe private Map rateLimitCache; @EJB SystemConfig systemConfig; From 52e714bd1b8ecf12a163d24535171d09bd33260a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 6 Feb 2024 12:29:37 -0500 Subject: [PATCH 0886/1112] review comments re: JCache --- .../source/installation/config.rst | 4 +- pom.xml | 17 +- .../iq/dataverse/cache/CacheFactoryBean.java | 61 ++------ .../iq/dataverse/cache/RateLimitUtil.java | 9 +- .../dataverse/cache/CacheFactoryBeanTest.java | 147 ++++++++++++------ 5 files changed, 134 insertions(+), 104 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index c60953c66f5..98513024160 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1387,10 +1387,12 @@ Note: If either of these settings exist in the database rate limiting will be en - RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. - ``curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'`` +.. code-block:: bash + curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' - RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. +.. code-block:: bash curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' .. 
_Branding Your Installation: diff --git a/pom.xml b/pom.xml index 529d2fa35c3..0544c29fa15 100644 --- a/pom.xml +++ b/pom.xml @@ -543,14 +543,9 @@ 2.0.0 - com.hazelcast - hazelcast - provided - - - xerces - xercesImpl - 2.11.0 + javax.cache + cache-api + 1.1.1 @@ -663,6 +658,12 @@ 3.9.0 test + + com.hazelcast + hazelcast + 5.3.6 + test + diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java index 4282a77b6af..2c3eabd9c4e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.cache; -import com.hazelcast.core.HazelcastInstance; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; @@ -9,26 +8,33 @@ import jakarta.ejb.Startup; import jakarta.inject.Inject; +import javax.cache.Cache; +import javax.cache.CacheManager; +import javax.cache.configuration.CompleteConfiguration; +import javax.cache.configuration.MutableConfiguration; +import javax.cache.spi.CachingProvider; import java.util.logging.Logger; -import java.util.Map; @Singleton @Startup public class CacheFactoryBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(CacheFactoryBean.class.getCanonicalName()); // Retrieved from Hazelcast, implements ConcurrentMap and is threadsafe - private Map rateLimitCache; + Cache rateLimitCache; @EJB SystemConfig systemConfig; @Inject - HazelcastInstance hzInstance; + CacheManager manager; + @Inject + CachingProvider provider; public final static String RATE_LIMIT_CACHE = "rateLimitCache"; @PostConstruct public void init() { - logger.info("Hazelcast member:" + hzInstance.getCluster().getLocalMember()); - rateLimitCache = hzInstance.getMap(RATE_LIMIT_CACHE); - logger.info("Rate Limit Cache Size: " + rateLimitCache.size()); + CompleteConfiguration config = + new MutableConfiguration() + .setTypes( String.class, String.class ); + rateLimitCache = manager.createCache(RATE_LIMIT_CACHE, config); } /** @@ -46,45 +52,4 @@ public boolean checkRate(User user, String action) { return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); } } - - public long getCacheSize(String cacheName) { - long cacheSize = 0; - switch (cacheName) { - case RATE_LIMIT_CACHE: - cacheSize = rateLimitCache.size(); - break; - default: - break; - } - return cacheSize; - } - public Object getCacheValue(String cacheName, String key) { - Object cacheValue = null; - switch (cacheName) { - case RATE_LIMIT_CACHE: - cacheValue = rateLimitCache.containsKey(key) ? 
rateLimitCache.get(key) : ""; - break; - default: - break; - } - return cacheValue; - } - public void setCacheValue(String cacheName, String key, Object value) { - switch (cacheName) { - case RATE_LIMIT_CACHE: - rateLimitCache.put(key, (String) value); - break; - default: - break; - } - } - public void clearCache(String cacheName) { - switch (cacheName) { - case RATE_LIMIT_CACHE: - rateLimitCache.clear(); - break; - default: - break; - } - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java index b710138865f..6d4c8352ce1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java @@ -10,6 +10,7 @@ import jakarta.json.JsonObject; import jakarta.json.JsonReader; +import javax.cache.Cache; import java.io.StringReader; import java.util.*; import java.util.concurrent.ConcurrentHashMap; @@ -43,7 +44,7 @@ static int getCapacity(SystemConfig systemConfig, User user, String action) { getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : getCapacityByTierAndAction(systemConfig, 0, action); } - static boolean rateLimited(final Map rateLimitCache, final String key, int capacityPerHour) { + static boolean rateLimited(final Cache rateLimitCache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -95,6 +96,7 @@ static void init(SystemConfig systemConfig) { for default if no action defined: "{tier}:" and the value is the default limit for the tier for each action: "{tier}:{action}" and the value is the limit defined in the setting */ + rateLimitMap.clear(); rateLimits.forEach(r -> { r.setDefaultLimit(getCapacityByTier(systemConfig, r.getTier())); rateLimitMap.put(getMapKey(r.getTier()), r.getDefaultLimitPerHour()); @@ -103,7 +105,8 @@ static void init(SystemConfig systemConfig) { } static void getRateLimitsFromJson(SystemConfig systemConfig) { String setting = systemConfig.getRateLimitsJson(); - if (!setting.isEmpty() && rateLimits.isEmpty()) { + rateLimits.clear(); + if (!setting.isEmpty()) { try { JsonReader jr = Json.createReader(new StringReader(setting)); JsonObject obj= jr.readObject(); @@ -127,7 +130,7 @@ static String getMapKey(int tier, String action) { } return key.toString(); } - static long longFromKey(Map cache, String key) { + static long longFromKey(Cache cache, String key) { Object l = cache.get(key); return l != null ? 
Long.parseLong(String.valueOf(l)) : 0L; } diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index fd05f216eb0..36e0c42e3ed 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -2,7 +2,9 @@ import com.hazelcast.cluster.Address; import com.hazelcast.config.Config; -import com.hazelcast.core.*; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.map.IMap; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -14,7 +16,18 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; +import javax.cache.Cache; +import javax.cache.CacheManager; +import javax.cache.configuration.CacheEntryListenerConfiguration; +import javax.cache.configuration.Configuration; +import javax.cache.integration.CompletionListener; +import javax.cache.processor.EntryProcessor; +import javax.cache.processor.EntryProcessorException; +import javax.cache.processor.EntryProcessorResult; import java.io.IOException; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; import java.util.UUID; import java.util.logging.Logger; @@ -28,8 +41,7 @@ public class CacheFactoryBeanTest { private static final Logger logger = Logger.getLogger(CacheFactoryBeanTest.class.getCanonicalName()); private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; - // Second instance for cluster testing - static CacheFactoryBean cache2 = null; + AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); String action; @@ -86,23 +98,13 @@ public void init() throws IOException { doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); cache = new CacheFactoryBean(); cache.systemConfig = mockedSystemConfig; - if (cache.hzInstance == null) { - cache.hzInstance = Hazelcast.newHazelcastInstance(getConfig()); + if (cache.rateLimitCache == null) { + cache.rateLimitCache = new TestCache(getConfig()); } - cache.init(); // PostConstruct - set up Hazelcast // Clear the static data, so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); - - // Testing cache implementation and code coverage - final String cacheKey = "CacheTestKey" + UUID.randomUUID(); - final String cacheValue = "CacheTestValue" + UUID.randomUUID(); - long cacheSize = cache.getCacheSize(cache.RATE_LIMIT_CACHE); - cache.setCacheValue(cache.RATE_LIMIT_CACHE, cacheKey,cacheValue); - assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > cacheSize); - Object cacheValueObj = cache.getCacheValue(cache.RATE_LIMIT_CACHE, cacheKey); - assertTrue(cacheValueObj != null && cacheValue.equalsIgnoreCase((String) cacheValueObj)); } // Reset to default auth user @@ -116,12 +118,7 @@ public void init() throws IOException { @AfterAll public static void cleanup() { - if (cache != null && cache.hzInstance != null) { - cache.hzInstance.shutdown(); - } - if (cache2 != null && cache2.hzInstance != null) { - cache2.hzInstance.shutdown(); - } + Hazelcast.shutdownAll(); } @Test public void testGuestUserGettingRateLimited() { @@ -133,7 +130,8 @@ public void testGuestUserGettingRateLimited() { break; } } - 
assertTrue(cache.getCacheSize(cache.RATE_LIMIT_CACHE) > 0); + String key = RateLimitUtil.generateCacheKey(guestUser, action); + assertTrue(cache.rateLimitCache.containsKey(key)); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } @@ -189,41 +187,34 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio public void testCluster() { // Make sure at least 1 entry is in the original cache cache.checkRate(authUser, action); + String key = RateLimitUtil.generateCacheKey(authUser, action); // Create a second cache to test cluster - cache2 = new CacheFactoryBean(); + CacheFactoryBean cache2 = new CacheFactoryBean(); cache2.systemConfig = mockedSystemConfig; - if (cache2.hzInstance == null) { - // Needed for Jenkins to form cluster based on TcpIp since Multicast fails - Address initialCache = cache.hzInstance.getCluster().getLocalMember().getAddress(); - String members = String.format("%s:%d", initialCache.getHost(),initialCache.getPort()); - logger.info("Switching to TcpIp mode with members: " + members); - cache2.hzInstance = Hazelcast.newHazelcastInstance(getConfig(members)); - } - cache2.init(); // PostConstruct - set up Hazelcast + // join cluster with original Hazelcast instance + cache2.rateLimitCache = new TestCache(getConfig(cache.rateLimitCache.get("memberAddress"))); // Check to see if the new cache synced with the existing cache - long s1 = cache.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); - long s2 = cache2.getCacheSize(CacheFactoryBean.RATE_LIMIT_CACHE); - assertTrue(s1 > 0 && s1 == s2, "Size1:" + s1 + " Size2:" + s2 ); + assertTrue(cache.rateLimitCache.get(key).equals(cache2.rateLimitCache.get(key))); - String key = "key1"; + key = "key1"; String value = "value1"; // Verify that both caches stay in sync - cache.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); - assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + cache.rateLimitCache.put(key, value); + assertTrue(value.equals(cache2.rateLimitCache.get(key))); // Clearing one cache also clears the other cache in the cluster - cache2.clearCache(CacheFactoryBean.RATE_LIMIT_CACHE); - assertTrue(String.valueOf(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key)).isEmpty()); + cache2.rateLimitCache.clear(); + assertTrue(cache.rateLimitCache.get(key) == null); // Verify no issue dropping one node from cluster - cache2.setCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key, value); - assertTrue(value.equals(cache2.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); - assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + cache2.rateLimitCache.put(key, value); + assertTrue(value.equals(cache2.rateLimitCache.get(key))); + assertTrue(value.equals(cache.rateLimitCache.get(key))); // Shut down hazelcast on cache2 and make sure data is still available in original cache - cache2.hzInstance.shutdown(); + cache2.rateLimitCache.close(); cache2 = null; - assertTrue(value.equals(cache.getCacheValue(CacheFactoryBean.RATE_LIMIT_CACHE, key))); + assertTrue(value.equals(cache.rateLimitCache.get(key))); } private Config getConfig() { @@ -242,4 +233,72 @@ private Config getConfig(String members) { } return config; } + + // convert Hazelcast IMap to JCache Cache + private class TestCache implements Cache{ + HazelcastInstance hzInstance; + IMap cache; + TestCache(Config config) { + hzInstance = Hazelcast.newHazelcastInstance(config); + cache = hzInstance.getMap("test"); + Address address = 
hzInstance.getCluster().getLocalMember().getAddress(); + cache.put("memberAddress", String.format("%s:%d", address.getHost(), address.getPort())); + } + @Override + public String get(String s) {return cache.get(s);} + @Override + public Map getAll(Set set) {return null;} + @Override + public boolean containsKey(String s) {return get(s) != null;} + @Override + public void loadAll(Set set, boolean b, CompletionListener completionListener) {} + @Override + public void put(String s, String s2) {cache.put(s,s2);} + @Override + public String getAndPut(String s, String s2) {return null;} + @Override + public void putAll(Map map) {} + @Override + public boolean putIfAbsent(String s, String s2) {return false;} + @Override + public boolean remove(String s) {return false;} + @Override + public boolean remove(String s, String s2) {return false;} + @Override + public String getAndRemove(String s) {return null;} + @Override + public boolean replace(String s, String s2, String v1) {return false;} + @Override + public boolean replace(String s, String s2) {return false;} + @Override + public String getAndReplace(String s, String s2) {return null;} + @Override + public void removeAll(Set set) {} + @Override + public void removeAll() {} + @Override + public void clear() {cache.clear();} + @Override + public > C getConfiguration(Class aClass) {return null;} + @Override + public T invoke(String s, EntryProcessor entryProcessor, Object... objects) throws EntryProcessorException {return null;} + @Override + public Map> invokeAll(Set set, EntryProcessor entryProcessor, Object... objects) {return null;} + @Override + public String getName() {return null;} + @Override + public CacheManager getCacheManager() {return null;} + @Override + public void close() {hzInstance.shutdown();} + @Override + public boolean isClosed() {return false;} + @Override + public T unwrap(Class aClass) {return null;} + @Override + public void registerCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) {} + @Override + public void deregisterCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) {} + @Override + public Iterator> iterator() {return null;} + } } From 669d273b6c8f25e5c53727635b695028f5eaef49 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 6 Feb 2024 12:39:38 -0500 Subject: [PATCH 0887/1112] review comments re: JCache --- .../source/installation/config.rst | 4 + .../dataverse/cache/CacheFactoryBeanTest.java | 112 +++++++++++++----- 2 files changed, 88 insertions(+), 28 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 98513024160..41411b5dfee 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1387,12 +1387,16 @@ Note: If either of these settings exist in the database rate limiting will be en - RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. + .. code-block:: bash + curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' - RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). 
This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. + .. code-block:: bash + curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' .. _Branding Your Installation: diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java index 36e0c42e3ed..59027913dee 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java @@ -245,60 +245,116 @@ private class TestCache implements Cache{ cache.put("memberAddress", String.format("%s:%d", address.getHost(), address.getPort())); } @Override - public String get(String s) {return cache.get(s);} + public String get(String s) { + return cache.get(s); + } @Override - public Map getAll(Set set) {return null;} + public Map getAll(Set set) { + return null; + } @Override - public boolean containsKey(String s) {return get(s) != null;} + public boolean containsKey(String s) { + return get(s) != null; + } @Override - public void loadAll(Set set, boolean b, CompletionListener completionListener) {} + public void loadAll(Set set, boolean b, CompletionListener completionListener) { + + } @Override - public void put(String s, String s2) {cache.put(s,s2);} + public void put(String s, String s2) { + cache.put(s,s2); + } @Override - public String getAndPut(String s, String s2) {return null;} + public String getAndPut(String s, String s2) { + return null; + } @Override - public void putAll(Map map) {} + public void putAll(Map map) { + + } @Override - public boolean putIfAbsent(String s, String s2) {return false;} + public boolean putIfAbsent(String s, String s2) { + return false; + } @Override - public boolean remove(String s) {return false;} + public boolean remove(String s) { + return false; + } @Override - public boolean remove(String s, String s2) {return false;} + public boolean remove(String s, String s2) { + return false; + } @Override - public String getAndRemove(String s) {return null;} + public String getAndRemove(String s) { + return null; + } @Override - public boolean replace(String s, String s2, String v1) {return false;} + public boolean replace(String s, String s2, String v1) { + return false; + } @Override - public boolean replace(String s, String s2) {return false;} + public boolean replace(String s, String s2) { + return false; + } @Override - public String getAndReplace(String s, String s2) 
{return null;} + public String getAndReplace(String s, String s2) { + return null; + } @Override - public void removeAll(Set set) {} + public void removeAll(Set set) { + + } @Override - public void removeAll() {} + public void removeAll() { + + } @Override - public void clear() {cache.clear();} + public void clear() { + cache.clear(); + } @Override - public > C getConfiguration(Class aClass) {return null;} + public > C getConfiguration(Class aClass) { + return null; + } @Override - public T invoke(String s, EntryProcessor entryProcessor, Object... objects) throws EntryProcessorException {return null;} + public T invoke(String s, EntryProcessor entryProcessor, Object... objects) throws EntryProcessorException { + return null; + } @Override - public Map> invokeAll(Set set, EntryProcessor entryProcessor, Object... objects) {return null;} + public Map> invokeAll(Set set, EntryProcessor entryProcessor, Object... objects) { + return null; + } @Override - public String getName() {return null;} + public String getName() { + return null; + } @Override - public CacheManager getCacheManager() {return null;} + public CacheManager getCacheManager() { + return null; + } @Override - public void close() {hzInstance.shutdown();} + public void close() { + hzInstance.shutdown(); + } @Override - public boolean isClosed() {return false;} + public boolean isClosed() { + return false; + } @Override - public T unwrap(Class aClass) {return null;} + public T unwrap(Class aClass) { + return null; + } @Override - public void registerCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) {} + public void registerCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) { + + } @Override - public void deregisterCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) {} + public void deregisterCacheEntryListener(CacheEntryListenerConfiguration cacheEntryListenerConfiguration) { + + } @Override - public Iterator> iterator() {return null;} + public Iterator> iterator() { + return null; + } } } From 9800fc16bf6827b08ad9b040e07f8166328be0a6 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 6 Feb 2024 15:13:15 -0500 Subject: [PATCH 0888/1112] doc change --- doc/release-notes/9356-rate-limiting.md | 2 +- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 3281e80beed..b05fa5e2131 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -1,4 +1,4 @@ -## Rate Limiting using JCache (with Hazelcast as a provider) +## Rate Limiting using JCache (with Hazelcast as provided by Payara) The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 41411b5dfee..2022987cae2 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1375,8 +1375,8 @@ Before being moved there, .. 
_cache-rate-limiting: -Configure Your Dataverse Installation to use JCache (with Hazelcast as a provider) for Rate Limiting ----------------------------------------------------------------------------------------------------- +Configure Your Dataverse Installation to use JCache (with Hazelcast as provided by Payara) for Rate Limiting +------------------------------------------------------------------------------------------------------------ Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. From ae0ec5a3f8a697c0969c4e641a920fd54823f742 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 7 Feb 2024 15:46:07 -0500 Subject: [PATCH 0889/1112] fix bad merge --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b388e978808..3f2f36ea36a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1154,8 +1154,8 @@ public boolean isStoringIngestedFilesWithHeaders() { return settingsService.isTrueForKey(SettingsServiceBean.Key.StoreIngestedTabularFilesWithVarHeaders, false); } - /* - RateLimitUtil will parse the json to create a List + /** + * RateLimitUtil will parse the json to create a List */ public String getRateLimitsJson() { return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingCapacityByTierAndAction, ""); From 5e507a020224af856ea505e5da97b7f6d7e3285a Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 7 Feb 2024 15:53:52 -0500 Subject: [PATCH 0890/1112] moving cache to util/cache --- src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java | 2 +- .../iq/dataverse/{ => util}/cache/CacheFactoryBean.java | 2 +- .../iq/dataverse/{ => util}/cache/RateLimitSetting.java | 2 +- .../harvard/iq/dataverse/{ => util}/cache/RateLimitUtil.java | 2 +- .../iq/dataverse/{ => util}/cache/CacheFactoryBeanTest.java | 2 +- .../iq/dataverse/{ => util}/cache/RateLimitUtilTest.java | 3 ++- 6 files changed, 7 insertions(+), 6 deletions(-) rename src/main/java/edu/harvard/iq/dataverse/{ => util}/cache/CacheFactoryBean.java (97%) rename src/main/java/edu/harvard/iq/dataverse/{ => util}/cache/RateLimitSetting.java (96%) rename src/main/java/edu/harvard/iq/dataverse/{ => util}/cache/RateLimitUtil.java (99%) rename src/test/java/edu/harvard/iq/dataverse/{ => util}/cache/CacheFactoryBeanTest.java (99%) rename src/test/java/edu/harvard/iq/dataverse/{ => util}/cache/RateLimitUtilTest.java (98%) diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 8636172b731..553e2d7497e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -4,7 +4,7 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; -import edu.harvard.iq.dataverse.cache.CacheFactoryBean; +import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; import 
edu.harvard.iq.dataverse.engine.DataverseEngine; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java similarity index 97% rename from src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java rename to src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java index 2c3eabd9c4e..384391e200b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java @@ -1,4 +1,4 @@ -package edu.harvard.iq.dataverse.cache; +package edu.harvard.iq.dataverse.util.cache; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java similarity index 96% rename from src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java rename to src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java index 752f9860127..cf9c9a5410e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitSetting.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java @@ -1,4 +1,4 @@ -package edu.harvard.iq.dataverse.cache; +package edu.harvard.iq.dataverse.util.cache; import jakarta.json.bind.annotation.JsonbProperty; diff --git a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java similarity index 99% rename from src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java rename to src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 6d4c8352ce1..64c86b0f25f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -1,4 +1,4 @@ -package edu.harvard.iq.dataverse.cache; +package edu.harvard.iq.dataverse.util.cache; import com.google.gson.Gson; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java similarity index 99% rename from src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java rename to src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index 59027913dee..b271ec42b82 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -1,4 +1,4 @@ -package edu.harvard.iq.dataverse.cache; +package edu.harvard.iq.dataverse.util.cache; import com.hazelcast.cluster.Address; import com.hazelcast.config.Config; diff --git a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java similarity index 98% rename from src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java rename to src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java index 033f9dbb67e..23ba3673252 100644 --- a/src/test/java/edu/harvard/iq/dataverse/cache/RateLimitUtilTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java @@ -1,9 +1,10 @@ -package edu.harvard.iq.dataverse.cache; +package edu.harvard.iq.dataverse.util.cache; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.cache.RateLimitUtil; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; From 0774223c6e8982fd0c04629735db8b218605ed1e Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 7 Feb 2024 16:05:04 -0500 Subject: [PATCH 0891/1112] review comments fixed --- .../edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 64c86b0f25f..09057c13ab8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -12,7 +12,10 @@ import javax.cache.Cache; import java.io.StringReader; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.logging.Logger; From d5b1fb5617b096068fa10f11ea3112470bbadab3 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 9 Feb 2024 14:00:30 -0500 Subject: [PATCH 0892/1112] rename db script --- ...add-rate-limiting.sql => V6.1.0.3__9356-add-rate-limiting.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.2__9356-add-rate-limiting.sql => V6.1.0.3__9356-add-rate-limiting.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.3__9356-add-rate-limiting.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.2__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.3__9356-add-rate-limiting.sql From 54f1077196e4d1603a5cc538d7e4ec477c572dc5 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 13:20:17 -0500 Subject: [PATCH 0893/1112] review comments --- .../examples/rate-limit-actions-setting.json | 42 +++++ .../source/installation/config.rst | 16 +- pom.xml | 2 - .../iq/dataverse/EjbDataverseEngine.java | 2 +- .../util/cache/CacheFactoryBean.java | 6 +- .../util/cache/CacheFactoryBeanTest.java | 140 ++++++---------- .../util/cache/RateLimitUtilTest.java | 155 ++++++++++-------- 7 files changed, 203 insertions(+), 160 deletions(-) create mode 100644 doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json diff --git a/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json b/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json new file mode 100644 index 00000000000..1086d0bd51f --- /dev/null +++ b/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json @@ -0,0 +1,42 @@ +{ + "rateLimits": [ + { + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", 
+ "GetLatestAccessibleDatasetVersionCommand" + ] + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ] +} \ No newline at end of file diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2022987cae2..4f6d05d2639 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1394,7 +1394,7 @@ Note: If either of these settings exist in the database rate limiting will be en - RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. - + :download:`rate-limit-actions.json ` .. code-block:: bash curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' @@ -4521,3 +4521,17 @@ tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed file on the fly. The setting is ``false`` by default, preserving the legacy behavior. + +:RateLimitingDefaultCapacityTiers ++++++++++++++++++++++++++++++++++ +Number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... +A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. + +:RateLimitingCapacityByTierAndAction +++++++++++++++++++++++++++++++++++++ +Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. +In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
+{"rateLimits":[ +{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, +{"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, +{"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]} diff --git a/pom.xml b/pom.xml index 0544c29fa15..8c4c2b3c4b8 100644 --- a/pom.xml +++ b/pom.xml @@ -545,7 +545,6 @@ javax.cache cache-api - 1.1.1 @@ -661,7 +660,6 @@ com.hazelcast hazelcast - 5.3.6 test diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 553e2d7497e..bb3fa475847 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -207,7 +207,7 @@ public R submit(Command aCommand) throws CommandException { try { logRec.setUserIdentifier( aCommand.getRequest().getUser().getIdentifier() ); // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. - if (!cacheFactory.checkRate(aCommand.getRequest().getUser(), aCommand.getClass().getSimpleName())) { + if (!cacheFactory.checkRate(aCommand.getRequest().getUser(), aCommand)) { throw new RateLimitCommandException(BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java index 384391e200b..c2781f3f4b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.util.cache; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.util.SystemConfig; import jakarta.annotation.PostConstruct; import jakarta.ejb.EJB; @@ -40,10 +41,11 @@ public void init() { /** * Check if user can make this call or if they are rate limited * @param user - * @param action + * @param command * @return true if user is superuser or rate not limited */ - public boolean checkRate(User user, String action) { + public boolean checkRate(User user, Command command) { + final String action = command.getClass().getSimpleName(); int capacity = RateLimitUtil.getCapacity(systemConfig, user, action); if (capacity == RateLimitUtil.NO_LIMIT) { return true; diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index b271ec42b82..e4162f20ce3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -7,6 +7,10 @@ import com.hazelcast.map.IMap; import 
edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.engine.command.Command; +import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; +import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; +import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; import edu.harvard.iq.dataverse.util.SystemConfig; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; @@ -28,8 +32,6 @@ import java.util.Iterator; import java.util.Map; import java.util.Set; -import java.util.UUID; -import java.util.logging.Logger; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; @@ -38,64 +40,64 @@ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class CacheFactoryBeanTest { - private static final Logger logger = Logger.getLogger(CacheFactoryBeanTest.class.getCanonicalName()); private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; AuthenticatedUser authUser = new AuthenticatedUser(); GuestUser guestUser = GuestUser.get(); - String action; static final String settingDefaultCapacity = "30,60,120"; - static final String settingJson = "{\n" + - " \"rateLimits\":[\n" + - " {\n" + - " \"tier\": 0,\n" + - " \"limitPerHour\": 10,\n" + - " \"actions\": [\n" + - " \"GetLatestPublishedDatasetVersionCommand\",\n" + - " \"GetPrivateUrlCommand\",\n" + - " \"GetDatasetCommand\",\n" + - " \"GetLatestAccessibleDatasetVersionCommand\"\n" + - " ]\n" + - " },\n" + - " {\n" + - " \"tier\": 0,\n" + - " \"limitPerHour\": 1,\n" + - " \"actions\": [\n" + - " \"CreateGuestbookResponseCommand\",\n" + - " \"UpdateDatasetVersionCommand\",\n" + - " \"DestroyDatasetCommand\",\n" + - " \"DeleteDataFileCommand\",\n" + - " \"FinalizeDatasetPublicationCommand\",\n" + - " \"PublishDatasetCommand\"\n" + - " ]\n" + - " },\n" + - " {\n" + - " \"tier\": 1,\n" + - " \"limitPerHour\": 30,\n" + - " \"actions\": [\n" + - " \"CreateGuestbookResponseCommand\",\n" + - " \"GetLatestPublishedDatasetVersionCommand\",\n" + - " \"GetPrivateUrlCommand\",\n" + - " \"GetDatasetCommand\",\n" + - " \"GetLatestAccessibleDatasetVersionCommand\",\n" + - " \"UpdateDatasetVersionCommand\",\n" + - " \"DestroyDatasetCommand\",\n" + - " \"DeleteDataFileCommand\",\n" + - " \"FinalizeDatasetPublicationCommand\",\n" + - " \"PublishDatasetCommand\"\n" + - " ]\n" + - " }\n" + - " ]\n" + - "}"; - + public String getJsonSetting() { + return """ + { + "rateLimits": [ + { + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" + ] + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ] + }"""; + } @BeforeEach public void init() throws IOException { // Reuse cache and config 
for all tests if (cache == null) { mockedSystemConfig = mock(SystemConfig.class); doReturn(settingDefaultCapacity).when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); - doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); + doReturn(getJsonSetting()).when(mockedSystemConfig).getRateLimitsJson(); cache = new CacheFactoryBean(); cache.systemConfig = mockedSystemConfig; if (cache.rateLimitCache == null) { @@ -111,9 +113,6 @@ public void init() throws IOException { authUser.setRateLimitTier(1); authUser.setSuperuser(false); authUser.setUserIdentifier("authUser"); - - // Create a unique action for each test - action = "cmd-" + UUID.randomUUID(); } @AfterAll @@ -122,6 +121,7 @@ public static void cleanup() { } @Test public void testGuestUserGettingRateLimited() { + Command action = new ListDataverseContentCommand(null,null); boolean rateLimited = false; int cnt = 0; for (; cnt <100; cnt++) { @@ -130,13 +130,14 @@ public void testGuestUserGettingRateLimited() { break; } } - String key = RateLimitUtil.generateCacheKey(guestUser, action); + String key = RateLimitUtil.generateCacheKey(guestUser, action.getClass().getSimpleName()); assertTrue(cache.rateLimitCache.containsKey(key)); assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); } @Test public void testAdminUserExemptFromGettingRateLimited() { + Command action = new ListExplicitGroupsCommand(null,null); authUser.setSuperuser(true); authUser.setUserIdentifier("admin"); boolean rateLimited = false; @@ -152,6 +153,7 @@ public void testAdminUserExemptFromGettingRateLimited() { @Test public void testAuthenticatedUserGettingRateLimited() throws InterruptedException { + Command action = new ListFacetsCommand(null,null); authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds boolean rateLimited = false; int cnt; @@ -183,40 +185,6 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio assertTrue(!rateLimited && cnt == 200, "rateLimited:"+rateLimited + " cnt:"+cnt); } - @Test - public void testCluster() { - // Make sure at least 1 entry is in the original cache - cache.checkRate(authUser, action); - String key = RateLimitUtil.generateCacheKey(authUser, action); - - // Create a second cache to test cluster - CacheFactoryBean cache2 = new CacheFactoryBean(); - cache2.systemConfig = mockedSystemConfig; - // join cluster with original Hazelcast instance - cache2.rateLimitCache = new TestCache(getConfig(cache.rateLimitCache.get("memberAddress"))); - - // Check to see if the new cache synced with the existing cache - assertTrue(cache.rateLimitCache.get(key).equals(cache2.rateLimitCache.get(key))); - - key = "key1"; - String value = "value1"; - // Verify that both caches stay in sync - cache.rateLimitCache.put(key, value); - assertTrue(value.equals(cache2.rateLimitCache.get(key))); - // Clearing one cache also clears the other cache in the cluster - cache2.rateLimitCache.clear(); - assertTrue(cache.rateLimitCache.get(key) == null); - - // Verify no issue dropping one node from cluster - cache2.rateLimitCache.put(key, value); - assertTrue(value.equals(cache2.rateLimitCache.get(key))); - assertTrue(value.equals(cache.rateLimitCache.get(key))); - // Shut down hazelcast on cache2 and make sure data is still available in original cache - cache2.rateLimitCache.close(); - cache2 = null; - assertTrue(value.equals(cache.rateLimitCache.get(key))); - } - private Config getConfig() { return getConfig(null); } diff --git 
a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java index 23ba3673252..564b69c1402 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java @@ -4,10 +4,12 @@ import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.cache.RateLimitUtil; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ -19,81 +21,99 @@ @MockitoSettings(strictness = Strictness.LENIENT) public class RateLimitUtilTest { - private SystemConfig mockedSystemConfig; + static SystemConfig mockedSystemConfig = mock(SystemConfig.class); + static SystemConfig mockedSystemConfigBad = mock(SystemConfig.class); - static final String settingJson = "{\n" + - " \"rateLimits\":[\n" + - " {\n" + - " \"tier\": 0,\n" + - " \"limitPerHour\": 10,\n" + - " \"actions\": [\n" + - " \"GetLatestPublishedDatasetVersionCommand\",\n" + - " \"GetPrivateUrlCommand\",\n" + - " \"GetDatasetCommand\",\n" + - " \"GetLatestAccessibleDatasetVersionCommand\"\n" + - " ]\n" + - " },\n" + - " {\n" + - " \"tier\": 0,\n" + - " \"limitPerHour\": 1,\n" + - " \"actions\": [\n" + - " \"CreateGuestbookResponseCommand\",\n" + - " \"UpdateDatasetVersionCommand\",\n" + - " \"DestroyDatasetCommand\",\n" + - " \"DeleteDataFileCommand\",\n" + - " \"FinalizeDatasetPublicationCommand\",\n" + - " \"PublishDatasetCommand\"\n" + - " ]\n" + - " },\n" + - " {\n" + - " \"tier\": 1,\n" + - " \"limitPerHour\": 30,\n" + - " \"actions\": [\n" + - " \"CreateGuestbookResponseCommand\",\n" + - " \"GetLatestPublishedDatasetVersionCommand\",\n" + - " \"GetPrivateUrlCommand\",\n" + - " \"GetDatasetCommand\",\n" + - " \"GetLatestAccessibleDatasetVersionCommand\",\n" + - " \"UpdateDatasetVersionCommand\",\n" + - " \"DestroyDatasetCommand\",\n" + - " \"DeleteDataFileCommand\",\n" + - " \"FinalizeDatasetPublicationCommand\",\n" + - " \"PublishDatasetCommand\"\n" + - " ]\n" + - " }\n" + - " ]\n" + - "}"; + static String getJsonSetting() { + return """ + { + "rateLimits": [ + { + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" + ] + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ] + }"""; + } static final String settingJsonBad = "{\n"; + @BeforeAll + public static void 
setUp() { + doReturn(settingJsonBad).when(mockedSystemConfigBad).getRateLimitsJson(); + doReturn("100,200").when(mockedSystemConfigBad).getRateLimitingDefaultCapacityTiers(); + } @BeforeEach - public void setup() { - mockedSystemConfig = mock(SystemConfig.class); + public void resetSettings() { + doReturn(getJsonSetting()).when(mockedSystemConfig).getRateLimitsJson(); doReturn("100,200").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); - // clear the static data so it can be reloaded with the new mocked data RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); } - @Test - public void testConfig() { - doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); - assertEquals(100, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 0)); - assertEquals(200, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 1)); - assertEquals(1, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "DestroyDatasetCommand")); - assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "Default Limit")); - - assertEquals(30, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "Default Limit")); - - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 2, "Default No Limit")); + @ParameterizedTest + @CsvSource(value = { + "100,0,", + "200,1,", + "1,0,DestroyDatasetCommand", + "100,0,Default Limit", + "30,1,DestroyDatasetCommand", + "200,1,Default Limit", + "-1,2,Default No Limit" + }) + void testConfig(int exp, int tier, String action) { + if (action == null) { + assertEquals(exp, RateLimitUtil.getCapacityByTier(mockedSystemConfig, tier)); + } else { + assertEquals(exp, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, tier, action)); + } } - @Test - public void testBadJson() { - doReturn(settingJsonBad).when(mockedSystemConfig).getRateLimitsJson(); - assertEquals(100, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 0)); - assertEquals(200, RateLimitUtil.getCapacityByTier(mockedSystemConfig, 1)); - assertEquals(100, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 0, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(200, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 1, "GetLatestAccessibleDatasetVersionCommand")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfig, 2, "GetLatestAccessibleDatasetVersionCommand")); + @ParameterizedTest + @CsvSource(value = { + "100,0,", + "200,1,", + "100,0,GetLatestAccessibleDatasetVersionCommand", + "200,1,GetLatestAccessibleDatasetVersionCommand", + "-1,2,GetLatestAccessibleDatasetVersionCommand" + }) + void testBadJson(int exp, int tier, String action) { + if (action == null) { + assertEquals(exp, RateLimitUtil.getCapacityByTier(mockedSystemConfigBad, tier)); + } else { + assertEquals(exp, RateLimitUtil.getCapacityByTierAndAction(mockedSystemConfigBad, tier, action)); + } } @Test @@ -103,7 +123,6 @@ public void testGenerateCacheKey() { } @Test public void testGetCapacity() { - doReturn(settingJson).when(mockedSystemConfig).getRateLimitsJson(); GuestUser guestUser = GuestUser.get(); assertEquals(10, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "GetPrivateUrlCommand")); From d2d3b4a129e9fc9817ef9191effe22ea43b7893f Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 15:31:05 -0500 Subject: 
[PATCH 0894/1112] fixing config.rst --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4f6d05d2639..c035d75b53a 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1394,7 +1394,7 @@ Note: If either of these settings exist in the database rate limiting will be en - RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. - :download:`rate-limit-actions.json ` +:download:`rate-limit-actions.json ` Example json for RateLimitingCapacityByTierAndAction .. code-block:: bash curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' From 3102b056b21c40c4da164fb020a8fcfa662caad2 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 15:34:05 -0500 Subject: [PATCH 0895/1112] fixing config.rst --- doc/sphinx-guides/source/installation/config.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index c035d75b53a..7d51e006a36 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1394,7 +1394,9 @@ Note: If either of these settings exist in the database rate limiting will be en - RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. + :download:`rate-limit-actions.json ` Example json for RateLimitingCapacityByTierAndAction + .. 
code-block:: bash curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' From 736c633b162562e5277d24dea746b47dc06bc653 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 20 Feb 2024 17:17:22 -0500 Subject: [PATCH 0896/1112] more review comments --- .../authorization/users/AuthenticatedUser.java | 6 ++---- .../iq/dataverse/util/cache/RateLimitSetting.java | 9 --------- .../iq/dataverse/util/cache/RateLimitUtil.java | 13 +++++-------- ...ing.sql => V6.1.0.4__9356-add-rate-limiting.sql} | 0 .../dataverse/util/cache/CacheFactoryBeanTest.java | 10 +++++++--- 5 files changed, 14 insertions(+), 24 deletions(-) rename src/main/resources/db/migration/{V6.1.0.3__9356-add-rate-limiting.sql => V6.1.0.4__9356-add-rate-limiting.sql} (100%) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 6abcb350222..50a1be7635f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -148,20 +148,18 @@ public class AuthenticatedUser implements User, Serializable { @Transient private Set mutedNotificationsSet = new HashSet<>(); - private int rateLimitTier; + private int rateLimitTier = 1; @PrePersist void prePersist() { mutedNotifications = Type.toStringValue(mutedNotificationsSet); mutedEmails = Type.toStringValue(mutedEmailsSet); - rateLimitTier = max(1,rateLimitTier); // db column defaults to 1 (minimum value for a tier). } @PostLoad public void initialize() { mutedNotificationsSet = Type.tokenizeToSet(mutedNotifications); mutedEmailsSet = Type.tokenizeToSet(mutedEmails); - rateLimitTier = max(1,rateLimitTier); // db column defaults to 1 (minimum value for a tier). 
} /** @@ -407,7 +405,7 @@ public int getRateLimitTier() { return rateLimitTier; } public void setRateLimitTier(int rateLimitTier) { - this.rateLimitTier = rateLimitTier; + this.rateLimitTier = max(1,rateLimitTier); } @OneToOne(mappedBy = "authenticatedUser") diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java index cf9c9a5410e..1f781f99a64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java @@ -7,38 +7,29 @@ public class RateLimitSetting { - @JsonbProperty("tier") private int tier; - @JsonbProperty("limitPerHour") private int limitPerHour = RateLimitUtil.NO_LIMIT; - @JsonbProperty("actions") private List actions = new ArrayList<>(); private int defaultLimitPerHour; public RateLimitSetting() {} - @JsonbProperty("tier") public void setTier(int tier) { this.tier = tier; } - @JsonbProperty("tier") public int getTier() { return this.tier; } - @JsonbProperty("limitPerHour") public void setLimitPerHour(int limitPerHour) { this.limitPerHour = limitPerHour; } - @JsonbProperty("limitPerHour") public int getLimitPerHour() { return this.limitPerHour; } - @JsonbProperty("actions") public void setActions(List actions) { this.actions = actions; } - @JsonbProperty("actions") public List getActions() { return this.actions; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 09057c13ab8..35cc1a5e451 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -1,14 +1,12 @@ package edu.harvard.iq.dataverse.util.cache; -import com.google.gson.Gson; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonObject; -import jakarta.json.JsonReader; +import jakarta.json.*; +import jakarta.json.bind.JsonbBuilder; +import jakarta.json.bind.JsonbException; import javax.cache.Cache; import java.io.StringReader; @@ -27,7 +25,6 @@ public class RateLimitUtil { private static final Logger logger = Logger.getLogger(RateLimitUtil.class.getCanonicalName()); static final List rateLimits = new CopyOnWriteArrayList<>(); static final Map rateLimitMap = new ConcurrentHashMap<>(); - private static final Gson gson = new Gson(); public static final int NO_LIMIT = -1; static String generateCacheKey(final User user, final String action) { @@ -114,9 +111,9 @@ static void getRateLimitsFromJson(SystemConfig systemConfig) { JsonReader jr = Json.createReader(new StringReader(setting)); JsonObject obj= jr.readObject(); JsonArray lst = obj.getJsonArray("rateLimits"); - rateLimits.addAll(gson.fromJson(String.valueOf(lst), + rateLimits.addAll(JsonbBuilder.create().fromJson(String.valueOf(lst), new ArrayList() {}.getClass().getGenericSuperclass())); - } catch (Exception e) { + } catch (JsonException | JsonbException e) { logger.warning("Unable to parse Rate Limit Json: " + e.getLocalizedMessage() + " Json:(" + setting + ")"); rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization } diff --git 
a/src/main/resources/db/migration/V6.1.0.3__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.4__9356-add-rate-limiting.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.3__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.4__9356-add-rate-limiting.sql diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index e4162f20ce3..7438d94ea41 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -12,8 +12,10 @@ import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.testing.Tags; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; @@ -33,12 +35,13 @@ import java.util.Map; import java.util.Set; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) +@Tag(Tags.NOT_ESSENTIAL_UNITTESTS) public class CacheFactoryBeanTest { private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; @@ -163,7 +166,8 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio break; } } - assertTrue(rateLimited && cnt == 120, "rateLimited:"+rateLimited + " cnt:"+cnt); + assertTrue(rateLimited); + assertEquals(120, cnt); for (cnt = 0; cnt <60; cnt++) { Thread.sleep(1000);// Wait for bucket to be replenished (check each second for 1 minute max) @@ -172,7 +176,7 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio break; } } - assertTrue(!rateLimited, "rateLimited:"+rateLimited + " cnt:"+cnt); + assertFalse(rateLimited, "rateLimited:"+rateLimited + " cnt:"+cnt); // Now change the user's tier, so it is no longer limited authUser.setRateLimitTier(3); // tier 3 = no limit From ecea90c53c6c6b6782b2ca97ba038cd7dd0e03a5 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 21 Feb 2024 10:34:44 -0500 Subject: [PATCH 0897/1112] fixing tests --- .../dataverse/util/cache/RateLimitUtil.java | 16 +++++---- .../util/cache/RateLimitUtilTest.java | 35 +++++++++++-------- 2 files changed, 31 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 35cc1a5e451..54e87e1fcb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -1,13 +1,16 @@ package edu.harvard.iq.dataverse.util.cache; +import com.google.gson.Gson; +import com.google.gson.JsonParseException; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.json.*; -import 
jakarta.json.bind.JsonbBuilder; -import jakarta.json.bind.JsonbException; - +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonException; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; import javax.cache.Cache; import java.io.StringReader; import java.util.ArrayList; @@ -111,9 +114,10 @@ static void getRateLimitsFromJson(SystemConfig systemConfig) { JsonReader jr = Json.createReader(new StringReader(setting)); JsonObject obj= jr.readObject(); JsonArray lst = obj.getJsonArray("rateLimits"); - rateLimits.addAll(JsonbBuilder.create().fromJson(String.valueOf(lst), + Gson gson = new Gson(); + rateLimits.addAll(gson.fromJson(String.valueOf(lst), new ArrayList() {}.getClass().getGenericSuperclass())); - } catch (JsonException | JsonbException e) { + } catch (JsonException | JsonParseException e) { logger.warning("Unable to parse Rate Limit Json: " + e.getLocalizedMessage() + " Json:(" + setting + ")"); rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java index 564b69c1402..fb1ba4c3c14 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java @@ -73,13 +73,13 @@ static String getJsonSetting() { @BeforeAll public static void setUp() { + doReturn(getJsonSetting()).when(mockedSystemConfig).getRateLimitsJson(); + doReturn("100,200").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); doReturn(settingJsonBad).when(mockedSystemConfigBad).getRateLimitsJson(); doReturn("100,200").when(mockedSystemConfigBad).getRateLimitingDefaultCapacityTiers(); } @BeforeEach - public void resetSettings() { - doReturn(getJsonSetting()).when(mockedSystemConfig).getRateLimitsJson(); - doReturn("100,200").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); + public void resetRateLimitUtilSettings() { RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); } @@ -123,25 +123,32 @@ public void testGenerateCacheKey() { } @Test public void testGetCapacity() { + SystemConfig config = mock(SystemConfig.class); + resetRateLimitUtil(config, true); + GuestUser guestUser = GuestUser.get(); - assertEquals(10, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "GetPrivateUrlCommand")); + assertEquals(10, RateLimitUtil.getCapacity(config, guestUser, "GetPrivateUrlCommand")); AuthenticatedUser authUser = new AuthenticatedUser(); authUser.setRateLimitTier(1); - assertEquals(30, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); + assertEquals(30, RateLimitUtil.getCapacity(config, authUser, "GetPrivateUrlCommand")); authUser.setSuperuser(true); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, authUser, "GetPrivateUrlCommand")); // no setting means rate limiting is not on - doReturn("").when(mockedSystemConfig).getRateLimitsJson(); - doReturn("").when(mockedSystemConfig).getRateLimitingDefaultCapacityTiers(); + resetRateLimitUtil(config, false); + + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, guestUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, guestUser, "xyz")); + 
assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, authUser, "GetPrivateUrlCommand")); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, authUser, "abc")); + authUser.setRateLimitTier(99); + assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(config, authUser, "def")); + } + private void resetRateLimitUtil(SystemConfig config, boolean enable) { + doReturn(enable ? getJsonSetting() : "").when(config).getRateLimitsJson(); + doReturn(enable ? "100,200" : "").when(config).getRateLimitingDefaultCapacityTiers(); RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "GetPrivateUrlCommand")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, guestUser, "xyz")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "GetPrivateUrlCommand")); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "abc")); - authUser.setRateLimitTier(99); - assertEquals(RateLimitUtil.NO_LIMIT, RateLimitUtil.getCapacity(mockedSystemConfig, authUser, "def")); } } From 9d575ed40aa55a15cecd23f023cb7665a5404c0f Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 21 Feb 2024 11:28:55 -0500 Subject: [PATCH 0898/1112] fixing tests --- .../iq/dataverse/util/cache/CacheFactoryBeanTest.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index 7438d94ea41..f7cf06b7d30 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -151,7 +151,8 @@ public void testAdminUserExemptFromGettingRateLimited() { break; } } - assertTrue(!rateLimited && cnt >= 99, "rateLimited:"+rateLimited + " cnt:"+cnt); + assertFalse(rateLimited); + assertTrue(cnt >= 99, "cnt:"+cnt); } @Test @@ -186,7 +187,8 @@ public void testAuthenticatedUserGettingRateLimited() throws InterruptedExceptio break; } } - assertTrue(!rateLimited && cnt == 200, "rateLimited:"+rateLimited + " cnt:"+cnt); + assertFalse(rateLimited); + assertEquals(200, cnt); } private Config getConfig() { From 692c65098a91b3fb822954eaba2d4ed9182151e3 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 22 Feb 2024 11:41:08 -0500 Subject: [PATCH 0899/1112] more review comments --- doc/release-notes/9356-rate-limiting.md | 2 +- .../examples/rate-limit-actions-setting.json | 6 +- .../source/installation/config.rst | 2 +- .../users/AuthenticatedUser.java | 9 +-- .../dataverse/util/cache/RateLimitUtil.java | 3 +- .../util/cache/CacheFactoryBeanTest.java | 80 +++++++++---------- .../util/cache/RateLimitUtilTest.java | 78 +++++++++--------- 7 files changed, 86 insertions(+), 94 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index b05fa5e2131..5433bc65ad8 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -15,6 +15,6 @@ Tiers not specified in this setting will default to `-1` (No Limit). `RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. 
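Read programmatically, each entry of that JSON is just a (tier, limitPerHour, actions) record, and the list flattens into a map keyed by "tier:command". The sketch below shows that deserialization with JSON-B (Jakarta JSON Binding), the standard these patches eventually settle on, and assumes a JSON-B implementation such as Yasson is on the classpath; the Entry class is a simplified stand-in, not the project's RateLimitSetting.

```java
import jakarta.json.bind.Jsonb;
import jakarta.json.bind.JsonbBuilder;
import java.util.ArrayList;
import java.util.List;

public class RateLimitJsonSketch {

    // Simplified stand-in for one rate limit entry.
    public static class Entry {
        public int tier;
        public int limitPerHour = -1;
        public List<String> actions = new ArrayList<>();
    }

    public static void main(String[] args) throws Exception {
        String json = """
                [
                  {"tier": 0, "limitPerHour": 10, "actions": ["GetDatasetCommand"]},
                  {"tier": 1, "limitPerHour": 30, "actions": ["GetDatasetCommand", "PublishDatasetCommand"]}
                ]""";

        try (Jsonb jsonb = JsonbBuilder.create()) {
            // The anonymous ArrayList subclass captures the generic element type for JSON-B.
            List<Entry> entries = jsonb.fromJson(json,
                    new ArrayList<Entry>() {}.getClass().getGenericSuperclass());
            for (Entry e : entries) {
                for (String action : e.actions) {
                    System.out.println(e.tier + ":" + action + " -> " + e.limitPerHour + " calls/hour");
                }
            }
        }
    }
}
```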
In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. -`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}'` +`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` Hazelcast is configured in Payara and should not need any changes for this feature \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json b/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json index 1086d0bd51f..3dfc7648dc3 100644 --- a/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json +++ b/doc/sphinx-guides/source/_static/installation/files/examples/rate-limit-actions-setting.json @@ -1,5 +1,4 @@ -{ - "rateLimits": [ +[ { "tier": 0, "limitPerHour": 10, @@ -38,5 +37,4 @@ "PublishDatasetCommand" ] } - ] -} \ No newline at end of file +] \ No newline at end of file diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 7d51e006a36..17dc6453a18 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1399,7 +1399,7 @@ Note: If either of these settings exist in the database rate limiting will be en .. 
code-block:: bash - curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]}' + curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]' .. _Branding Your Installation: diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 50a1be7635f..893d7a65485 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -16,7 +16,6 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; -import static java.lang.Math.max; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; @@ -44,6 +43,7 @@ import jakarta.persistence.PostLoad; import jakarta.persistence.PrePersist; import jakarta.persistence.Transient; +import jakarta.validation.constraints.Min; import jakarta.validation.constraints.NotBlank; import jakarta.validation.constraints.NotNull; @@ -148,6 +148,8 @@ public class AuthenticatedUser implements User, Serializable { @Transient private Set mutedNotificationsSet = new HashSet<>(); + @Column(nullable=false) + @Min(value = 1, message = "Rate Limit Tier must be greater than 0.") private int rateLimitTier = 1; @PrePersist @@ -404,9 +406,7 @@ public void setDeactivatedTime(Timestamp deactivatedTime) { public int getRateLimitTier() { return rateLimitTier; } - public void setRateLimitTier(int rateLimitTier) { - this.rateLimitTier = max(1,rateLimitTier); - } + public void setRateLimitTier(int rateLimitTier) { this.rateLimitTier = rateLimitTier; } @OneToOne(mappedBy = "authenticatedUser") private AuthenticatedUserLookup authenticatedUserLookup; @@ -446,7 +446,6 @@ public void setShibIdentityProvider(String shibIdentityProvider) { 
public JsonObjectBuilder toJson() { //JsonObjectBuilder authenicatedUserJson = Json.createObjectBuilder(); - NullSafeJsonBuilder authenicatedUserJson = NullSafeJsonBuilder.jsonObjectBuilder(); authenicatedUserJson.add("id", this.id); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 54e87e1fcb2..68a3415e071 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -112,8 +112,7 @@ static void getRateLimitsFromJson(SystemConfig systemConfig) { if (!setting.isEmpty()) { try { JsonReader jr = Json.createReader(new StringReader(setting)); - JsonObject obj= jr.readObject(); - JsonArray lst = obj.getJsonArray("rateLimits"); + JsonArray lst = jr.readArray(); Gson gson = new Gson(); rateLimits.addAll(gson.fromJson(String.valueOf(lst), new ArrayList() {}.getClass().getGenericSuperclass())); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index f7cf06b7d30..92fd6731e93 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -41,7 +41,6 @@ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) -@Tag(Tags.NOT_ESSENTIAL_UNITTESTS) public class CacheFactoryBeanTest { private SystemConfig mockedSystemConfig; static CacheFactoryBean cache = null; @@ -51,48 +50,46 @@ public class CacheFactoryBeanTest { static final String settingDefaultCapacity = "30,60,120"; public String getJsonSetting() { return """ + [ { - "rateLimits": [ - { - "tier": 0, - "limitPerHour": 10, - "actions": [ - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand" - ] - }, - { - "tier": 0, - "limitPerHour": 1, - "actions": [ - "CreateGuestbookResponseCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - }, - { - "tier": 1, - "limitPerHour": 30, - "actions": [ - "CreateGuestbookResponseCommand", - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - } + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" ] - }"""; + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ]"""; } @BeforeEach public void init() throws 
IOException { @@ -156,6 +153,7 @@ public void testAdminUserExemptFromGettingRateLimited() { } @Test + @Tag(Tags.NOT_ESSENTIAL_UNITTESTS) public void testAuthenticatedUserGettingRateLimited() throws InterruptedException { Command action = new ListFacetsCommand(null,null); authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java index fb1ba4c3c14..5ddcc190993 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java @@ -26,48 +26,46 @@ public class RateLimitUtilTest { static String getJsonSetting() { return """ + [ { - "rateLimits": [ - { - "tier": 0, - "limitPerHour": 10, - "actions": [ - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand" - ] - }, - { - "tier": 0, - "limitPerHour": 1, - "actions": [ - "CreateGuestbookResponseCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - }, - { - "tier": 1, - "limitPerHour": 30, - "actions": [ - "CreateGuestbookResponseCommand", - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] - } + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" ] - }"""; + }, + { + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + }, + { + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] + } + ]"""; } static final String settingJsonBad = "{\n"; From 13674df5f24041cfbaa7a0f24082be18c853e917 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 22 Feb 2024 13:30:11 -0500 Subject: [PATCH 0900/1112] more review comments --- .../iq/dataverse/authorization/users/AuthenticatedUser.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 893d7a65485..d6d3e0317ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -406,7 +406,9 @@ public void setDeactivatedTime(Timestamp deactivatedTime) { public int getRateLimitTier() { return rateLimitTier; } - public void setRateLimitTier(int rateLimitTier) { this.rateLimitTier = rateLimitTier; } + public void setRateLimitTier(int rateLimitTier) { + 
this.rateLimitTier = rateLimitTier; + } @OneToOne(mappedBy = "authenticatedUser") private AuthenticatedUserLookup authenticatedUserLookup; From 1e4d3519ec5b26fa707c2bdb58f4e2777f39eb89 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 27 Feb 2024 10:11:39 -0500 Subject: [PATCH 0901/1112] review comments --- doc/release-notes/9356-rate-limiting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 5433bc65ad8..9b3d38f950f 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -9,7 +9,7 @@ If neither setting exists rate limiting is disabled. `RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. -Tiers not specified in this setting will default to `-1` (No Limit). +Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." `curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` `RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). From 8edbc0473895e7db0fd7cff92a9707d6ab142829 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 27 Feb 2024 10:18:04 -0500 Subject: [PATCH 0902/1112] review comments --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 17dc6453a18..f7a16066839 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1386,7 +1386,7 @@ Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. - RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... - A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. + A value of -1 can be used to signify no rate limit. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." .. 
code-block:: bash From 4a0e0af55cc3f406b781a98f669703195d5066b0 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 29 Feb 2024 14:26:16 -0500 Subject: [PATCH 0903/1112] rename sql to unique --- ...add-rate-limiting.sql => V6.1.0.5__9356-add-rate-limiting.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.4__9356-add-rate-limiting.sql => V6.1.0.5__9356-add-rate-limiting.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.4__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.5__9356-add-rate-limiting.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.4__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.5__9356-add-rate-limiting.sql From 5233bf2fdd085ab7b0c0ef00a468cc11a623e593 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 29 Feb 2024 16:05:54 -0500 Subject: [PATCH 0904/1112] review comments --- doc/release-notes/9356-rate-limiting.md | 4 ++-- doc/sphinx-guides/source/installation/config.rst | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md index 9b3d38f950f..1d68669af26 100644 --- a/doc/release-notes/9356-rate-limiting.md +++ b/doc/release-notes/9356-rate-limiting.md @@ -7,12 +7,12 @@ Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. -`RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. +`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." `curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` -`RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). +`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
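From the caller's point of view these hourly capacities behave like a token bucket: a tier/command pair starts with limitPerHour tokens, each call consumes one, and tokens trickle back over the hour (the tests earlier in this series describe one token being added roughly every 3600/limitPerHour seconds, e.g. every 30 seconds at 120 calls per hour). The following is a simplified, in-memory sketch of that idea only, not the cache-backed implementation the application uses.

```java
import java.time.Duration;
import java.time.Instant;

/** Simplified token bucket: at most capacityPerHour tokens, refilled continuously. */
public class TokenBucketSketch {

    private final int capacityPerHour;
    private double tokens;
    private Instant lastRefill;

    public TokenBucketSketch(int capacityPerHour) {
        this.capacityPerHour = capacityPerHour;
        this.tokens = capacityPerHour;   // start with a full bucket
        this.lastRefill = Instant.now();
    }

    /** Returns true if the call is allowed (a token was available), false if rate limited. */
    public synchronized boolean tryAcquire() {
        refill();
        if (tokens >= 1.0) {
            tokens -= 1.0;
            return true;
        }
        return false;
    }

    private void refill() {
        Instant now = Instant.now();
        double elapsedSeconds = Duration.between(lastRefill, now).toMillis() / 1000.0;
        // capacityPerHour tokens per 3600 seconds, capped at the bucket size
        tokens = Math.min(capacityPerHour, tokens + elapsedSeconds * capacityPerHour / 3600.0);
        lastRefill = now;
    }

    public static void main(String[] args) {
        TokenBucketSketch bucket = new TokenBucketSketch(120); // e.g. a tier allowing 120 calls/hour
        int allowed = 0;
        for (int i = 0; i < 200; i++) {
            if (bucket.tryAcquire()) {
                allowed++;
            }
        }
        System.out.println("allowed " + allowed + " of 200 immediate calls"); // roughly 120
    }
}
```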
`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f7a16066839..460307241e9 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1385,14 +1385,14 @@ Rate limits can be imposed on command APIs by configuring the tier, the command, Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. -- RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... +- :RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... A value of -1 can be used to signify no rate limit. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." .. code-block:: bash curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000' -- RateLimitingCapacityByTierAndAction is a Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. +- :RateLimitingCapacityByTierAndAction is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. :download:`rate-limit-actions.json ` Example json for RateLimitingCapacityByTierAndAction @@ -4531,7 +4531,7 @@ A value of -1 can be used to signify no rate limit. Also, by default, a tier not :RateLimitingCapacityByTierAndAction ++++++++++++++++++++++++++++++++++++ -Json object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. +JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. 
In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. {"rateLimits":[ {"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, From b66e0002bf3ebdab625e9285c813f980eca34a59 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Thu, 29 Feb 2024 16:10:38 -0500 Subject: [PATCH 0905/1112] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 460307241e9..70c1e40d76b 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1375,7 +1375,7 @@ Before being moved there, .. _cache-rate-limiting: -Configure Your Dataverse Installation to use JCache (with Hazelcast as provided by Payara) for Rate Limiting +Configure Your Dataverse Installation to Use JCache (with Hazelcast as Provided by Payara) for Rate Limiting ------------------------------------------------------------------------------------------------------------ Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. From 0b3c5e385c6aaeeb67392c89e100c6286ad90f71 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 1 Mar 2024 18:01:55 +0100 Subject: [PATCH 0906/1112] Cosmetics for 9356 - Rate Limiting PR (#10349) * style(cache): switch from Gson to JSON-B via JSR-367 Avoiding usage of GSON will eventually allow us to reduce dependencies. Standards for the win! 
* style(cache): address SonarLint suggestions for code improvements - Remove unnecessary StringBuffers - Switch to better readable else-if construction to determine capacity - Add missing generics - Remove stale import --- pom.xml | 12 ++++ .../util/cache/RateLimitSetting.java | 2 - .../dataverse/util/cache/RateLimitUtil.java | 64 +++++++++---------- 3 files changed, 41 insertions(+), 37 deletions(-) diff --git a/pom.xml b/pom.xml index 8c4c2b3c4b8..f736f04cf32 100644 --- a/pom.xml +++ b/pom.xml @@ -210,6 +210,18 @@ provided + + + jakarta.json.bind + jakarta.json.bind-api + + + + org.eclipse + yasson + test + + org.glassfish diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java index 1f781f99a64..54da5a46670 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitSetting.java @@ -1,7 +1,5 @@ package edu.harvard.iq.dataverse.util.cache; -import jakarta.json.bind.annotation.JsonbProperty; - import java.util.ArrayList; import java.util.List; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index 68a3415e071..b566cd42fe1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -1,18 +1,14 @@ package edu.harvard.iq.dataverse.util.cache; -import com.google.gson.Gson; -import com.google.gson.JsonParseException; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.util.SystemConfig; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonException; -import jakarta.json.JsonObject; -import jakarta.json.JsonReader; +import jakarta.json.bind.Jsonb; +import jakarta.json.bind.JsonbBuilder; +import jakarta.json.bind.JsonbException; + import javax.cache.Cache; -import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -31,23 +27,19 @@ public class RateLimitUtil { public static final int NO_LIMIT = -1; static String generateCacheKey(final User user, final String action) { - StringBuffer id = new StringBuffer(); - id.append(user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()); - if (action != null) { - id.append(":").append(action); - } - return id.toString(); + return (user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()) + + (action != null ? ":" + action : ""); } static int getCapacity(SystemConfig systemConfig, User user, String action) { if (user != null && user.isSuperuser()) { return NO_LIMIT; - }; + } // get the capacity, i.e. calls per hour, from config - return (user instanceof AuthenticatedUser) ? - getCapacityByTierAndAction(systemConfig, ((AuthenticatedUser) user).getRateLimitTier(), action) : + return (user instanceof AuthenticatedUser authUser) ? 
+ getCapacityByTierAndAction(systemConfig, authUser.getRateLimitTier(), action) : getCapacityByTierAndAction(systemConfig, 0, action); } - static boolean rateLimited(final Cache rateLimitCache, final String key, int capacityPerHour) { + static boolean rateLimited(final Cache rateLimitCache, final String key, int capacityPerHour) { if (capacityPerHour == NO_LIMIT) { return false; } @@ -73,10 +65,14 @@ static int getCapacityByTierAndAction(SystemConfig systemConfig, Integer tier, S if (rateLimits.isEmpty()) { init(systemConfig); } - - return rateLimitMap.containsKey(getMapKey(tier,action)) ? rateLimitMap.get(getMapKey(tier,action)) : - rateLimitMap.containsKey(getMapKey(tier)) ? rateLimitMap.get(getMapKey(tier)) : - getCapacityByTier(systemConfig, tier); + + if (rateLimitMap.containsKey(getMapKey(tier, action))) { + return rateLimitMap.get(getMapKey(tier,action)); + } else if (rateLimitMap.containsKey(getMapKey(tier))) { + return rateLimitMap.get(getMapKey(tier)); + } else { + return getCapacityByTier(systemConfig, tier); + } } static int getCapacityByTier(SystemConfig systemConfig, int tier) { int value = NO_LIMIT; @@ -106,19 +102,22 @@ static void init(SystemConfig systemConfig) { r.getActions().forEach(a -> rateLimitMap.put(getMapKey(r.getTier(), a), r.getLimitPerHour())); }); } + + @SuppressWarnings("java:S2133") // <- To enable casting to generic in JSON-B we need a class instance, false positive static void getRateLimitsFromJson(SystemConfig systemConfig) { String setting = systemConfig.getRateLimitsJson(); rateLimits.clear(); if (!setting.isEmpty()) { - try { - JsonReader jr = Json.createReader(new StringReader(setting)); - JsonArray lst = jr.readArray(); - Gson gson = new Gson(); - rateLimits.addAll(gson.fromJson(String.valueOf(lst), + try (Jsonb jsonb = JsonbBuilder.create()) { + rateLimits.addAll(jsonb.fromJson(setting, new ArrayList() {}.getClass().getGenericSuperclass())); - } catch (JsonException | JsonParseException e) { + } catch (JsonbException e) { logger.warning("Unable to parse Rate Limit Json: " + e.getLocalizedMessage() + " Json:(" + setting + ")"); rateLimits.add(new RateLimitSetting()); // add a default entry to prevent re-initialization + // Note: Usually using Exception in a catch block is an antipattern and should be avoided. + // As the JSON-B interface does not specify a non-generic type, we have to use this. + } catch (Exception e) { + logger.warning("Could not close JSON-B reader"); } } } @@ -126,14 +125,9 @@ static String getMapKey(int tier) { return getMapKey(tier, null); } static String getMapKey(int tier, String action) { - StringBuffer key = new StringBuffer(); - key.append(tier).append(":"); - if (action != null) { - key.append(action); - } - return key.toString(); + return tier + ":" + (action != null ? action : ""); } - static long longFromKey(Cache cache, String key) { + static long longFromKey(Cache cache, String key) { Object l = cache.get(key); return l != null ? 
Long.parseLong(String.valueOf(l)) : 0L; } From 226622519fc7b44b6ff49537985dec71b730b8fc Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 6 Mar 2024 13:03:41 -0500 Subject: [PATCH 0907/1112] rename sql file --- ...add-rate-limiting.sql => V6.1.0.6__9356-add-rate-limiting.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.5__9356-add-rate-limiting.sql => V6.1.0.6__9356-add-rate-limiting.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.5__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.6__9356-add-rate-limiting.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.5__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.6__9356-add-rate-limiting.sql From a1ab6f9e3b65919f79b85b4dd0bdccb70e8cb3a6 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Mon, 18 Mar 2024 13:49:41 -0400 Subject: [PATCH 0908/1112] change sql script name --- .../{V6.1.0.6__9356-add-rate-limiting.sql => V6.1.0.7.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.1.0.6__9356-add-rate-limiting.sql => V6.1.0.7.sql} (100%) diff --git a/src/main/resources/db/migration/V6.1.0.6__9356-add-rate-limiting.sql b/src/main/resources/db/migration/V6.1.0.7.sql similarity index 100% rename from src/main/resources/db/migration/V6.1.0.6__9356-add-rate-limiting.sql rename to src/main/resources/db/migration/V6.1.0.7.sql From 4a45caebfda51b8e445fd3b1936aeea05d54bd81 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 18 Mar 2024 15:53:24 -0400 Subject: [PATCH 0909/1112] Change to return null --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 400075cd899..fdb563e857b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5819,7 +5819,7 @@ public String getCroissant() { return croissant; } } - return ""; + return null; } public String getJsonLd() { From d7fa076bc7b0bc3a39c40b6b08797d93372a1847 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 18 Mar 2024 16:45:48 -0400 Subject: [PATCH 0910/1112] Empty validation --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index fdb563e857b..beb4c1f9db2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5814,7 +5814,7 @@ public String getCroissant() { final String CROISSANT_SCHEMA_NAME = "croissant"; ExportService instance = ExportService.getInstance(); String croissant = instance.getExportAsString(dataset, CROISSANT_SCHEMA_NAME); - if (croissant != null) { + if (croissant != null && !croissant.isEmpty()) { logger.fine("Returning cached CROISSANT."); return croissant; } From ff16a49bd9ca7ca9c5d57a9a1e1b8e06af855e0a Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Tue, 19 Mar 2024 10:40:52 +0100 Subject: [PATCH 0911/1112] semaphore for async indexing and sync index in transaction after publish --- .../source/installation/config.rst | 9 ++++++ .../FinalizeDatasetPublicationCommand.java | 7 ++++- .../iq/dataverse/search/IndexServiceBean.java | 
29 +++++++++++++++++-- .../iq/dataverse/settings/JvmSettings.java | 4 +++ 4 files changed, 46 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2baa2827250..06936dab015 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2352,6 +2352,15 @@ when using it to configure your core name! Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SOLR_PATH``. +dataverse.concurrency.max-async-indexes ++++++++++++++++++++ + +Maximum number of simultaneously running asynchronous dataset index operations. + +Defaults to ``4``. + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SOLR_CONCURRENCY_MAX_ASYNC_INDEXES``. + dataverse.rserve.host +++++++++++++++++++++ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 37aeee231e1..1277a98aa31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -247,6 +247,12 @@ public Dataset execute(CommandContext ctxt) throws CommandException { logger.info("Successfully published the dataset "+readyDataset.getGlobalId().asString()); readyDataset = ctxt.em().merge(readyDataset); + + try { + ctxt.index().indexDataset(readyDataset, true); + } catch (SolrServerException | IOException e) { + throw new CommandException("Indexing failed: " + e.getMessage(), this); + } return readyDataset; } @@ -267,7 +273,6 @@ public boolean onSuccess(CommandContext ctxt, Object r) { } catch (Exception e) { logger.warning("Failure to send dataset published messages for : " + dataset.getId() + " : " + e.getMessage()); } - ctxt.index().asyncIndexDataset(dataset, true); //re-indexing dataverses that have additional subjects if (!dataversesToIndex.isEmpty()){ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 5716b39e85c..cf1e58e4028 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -35,6 +35,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Future; +import java.util.concurrent.Semaphore; import java.util.function.Function; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -341,6 +342,8 @@ public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) { private static final Map NEXT_TO_INDEX = new ConcurrentHashMap<>(); // indexingNow is a set of dataset ids of datasets being indexed asynchronously right now private static final Map INDEXING_NOW = new ConcurrentHashMap<>(); + // semaphore for async indexing + private static final Semaphore ASYNC_INDEX_SEMAPHORE = new Semaphore(JvmSettings.MAX_ASYNC_INDEXES.lookupOptional(Integer.class).orElse(4), true); // When you pass null as Dataset parameter to this method, it indicates that the indexing of the dataset with "id" has finished // Pass non-null Dataset to schedule it for indexing @@ -385,6 +388,19 @@ synchronized private static Dataset getNextToIndex(Long id, Dataset d) { */ 
@Asynchronous public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { + try { + ASYNC_INDEX_SEMAPHORE.acquire(); + doAyncIndexDataset(dataset, doNormalSolrDocCleanUp); + } catch (InterruptedException e) { + String failureLogText = "Indexing failed: interrupted. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); + } finally { + ASYNC_INDEX_SEMAPHORE.release(); + } + } + + private void doAyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { Long id = dataset.getId(); Dataset next = getNextToIndex(id, dataset); // if there is an ongoing index job for this dataset, next is null (ongoing index job will reindex the newest version after current indexing finishes) while (next != null) { @@ -402,7 +418,16 @@ public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { @Asynchronous public void asyncIndexDatasetList(List datasets, boolean doNormalSolrDocCleanUp) { for(Dataset dataset : datasets) { - asyncIndexDataset(dataset, true); + try { + ASYNC_INDEX_SEMAPHORE.acquire(); + doAyncIndexDataset(dataset, true); + } catch (InterruptedException e) { + String failureLogText = "Indexing failed: interrupted. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); + } finally { + ASYNC_INDEX_SEMAPHORE.release(); + } } } @@ -414,7 +439,7 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce } } - private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { + public void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { doIndexDataset(dataset, doNormalSolrDocCleanUp); updateLastIndexedTime(dataset.getId()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index b92618dab89..8293c960c3b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -60,6 +60,10 @@ public enum JvmSettings { SOLR_CORE(SCOPE_SOLR, "core"), SOLR_PATH(SCOPE_SOLR, "path"), + // INDEX CONCURENCY + SCOPE_SOLR_CONCURENCY(SCOPE_SOLR, "concurrency"), + MAX_ASYNC_INDEXES(SCOPE_SOLR_CONCURENCY, "max-async-indexes"), + // RSERVE CONNECTION SCOPE_RSERVE(PREFIX, "rserve"), RSERVE_HOST(SCOPE_RSERVE, "host"), From 0002008ba3d81b1fc64604126b62d9a6b5ef6004 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Tue, 19 Mar 2024 11:04:30 +0100 Subject: [PATCH 0912/1112] fixed too short underline in config doc --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 06936dab015..318816d1050 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2353,7 +2353,7 @@ when using it to configure your core name! Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SOLR_PATH``. 
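Conceptually, the new limit is a counting semaphore wrapped around the asynchronous index jobs. As a general pattern, deliberately simplified and not the project's IndexServiceBean, bounding async work with java.util.concurrent.Semaphore looks roughly like this; releasing only after a successful acquire() avoids handing back permits that were never taken if the wait itself is interrupted:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Semaphore;

    // Generic sketch of capping how many async jobs run at once.
    public class BoundedAsyncRunner {

        private final Semaphore permits;
        private final ExecutorService pool = Executors.newCachedThreadPool();

        public BoundedAsyncRunner(int maxConcurrent) {
            // "fair" so waiting jobs are served in arrival order
            this.permits = new Semaphore(maxConcurrent, true);
        }

        public void submit(Runnable job) {
            pool.submit(() -> {
                boolean acquired = false;
                try {
                    permits.acquire();      // blocks once maxConcurrent jobs are running
                    acquired = true;
                    job.run();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();   // restore the interrupt flag
                } finally {
                    if (acquired) {
                        permits.release();  // only give back a permit we actually took
                    }
                }
            });
        }
    }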
dataverse.concurrency.max-async-indexes -+++++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++ Maximum number of simultaneously running asynchronous dataset index operations. From 5438e218de2a1f7a06a3498b82638002765ef13b Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 19 Mar 2024 09:54:58 -0400 Subject: [PATCH 0913/1112] Add changes after our slack conversation --- .../edu/harvard/iq/dataverse/api/Datasets.java | 8 ++++---- .../iq/dataverse/util/json/JsonPrinter.java | 17 +++++++++++------ 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f047b6b97bd..ba62e6e2042 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -437,6 +437,10 @@ public Response getVersion(@Context ContainerRequestContext crc, headers, includeDeaccessioned, checkPerms); + + if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) { + return notFound("Dataset version not found"); + } DatasetVersion latestDatasetVersion = null; @@ -451,10 +455,6 @@ public Response getVersion(@Context ContainerRequestContext crc, headers, deaccesionedLookup, checkPerms); - - if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) { - return notFound("Dataset version not found"); - } if (excludeFiles == null ? true : !excludeFiles) { requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 173a2125b40..254a02547d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -427,19 +427,24 @@ public static JsonObjectBuilder json(DatasetVersion dsv, .add("id", dsv.getId()).add("datasetId", dataset.getId()) .add("datasetPersistentId", dataset.getGlobalId().asString()) .add("storageIdentifier", dataset.getStorageIdentifier()) - .add("versionNumber", dsv.getVersionNumber()).add("versionMinorNumber", dsv.getMinorVersionNumber()) - .add("versionState", dsv.getVersionState().name()).add("versionNote", dsv.getVersionNote()) - .add("archiveNote", dsv.getArchiveNote()).add("deaccessionLink", dsv.getDeaccessionLink()) - .add("distributionDate", dsv.getDistributionDate()).add("productionDate", dsv.getProductionDate()) + .add("versionNumber", dsv.getVersionNumber()) + .add("versionMinorNumber", dsv.getMinorVersionNumber()) + .add("versionState", dsv.getVersionState().name()) + .add("versionNote", dsv.getVersionNote()) + .add("archiveNote", dsv.getArchiveNote()) + .add("deaccessionLink", dsv.getDeaccessionLink()) + .add("distributionDate", dsv.getDistributionDate()) + .add("productionDate", dsv.getProductionDate()) .add("UNF", dsv.getUNF()).add("archiveTime", format(dsv.getArchiveTime())) - .add("lastUpdateTime", format(dsv.getLastUpdateTime())).add("releaseTime", format(dsv.getReleaseTime())) + .add("lastUpdateTime", format(dsv.getLastUpdateTime())) + .add("releaseTime", format(dsv.getReleaseTime())) .add("createTime", format(dsv.getCreateTime())) .add("alternativePersistentId", dataset.getAlternativePersistentIdentifier()) .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD()) .add("citationDate", dataset.getCitationDateFormattedYYYYMMDD()); if(latestDsv != null) { - 
bld.add("latestVersionPublishingStatus", latestDsv.getVersionState().name()).add("versionNote", latestDsv.getVersionNote()); + bld.add("latestVersionPublishingState", latestDsv.getVersionState().name()); } License license = DatasetUtil.getLicense(dsv); From 3220048999cfc431963f141717e54d75a4117485 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 19 Mar 2024 16:48:28 -0400 Subject: [PATCH 0914/1112] Release notes snippet --- doc/release-notes/10382-optional-croissant-exporter.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10382-optional-croissant-exporter.md diff --git a/doc/release-notes/10382-optional-croissant-exporter.md b/doc/release-notes/10382-optional-croissant-exporter.md new file mode 100644 index 00000000000..e5c47409a1b --- /dev/null +++ b/doc/release-notes/10382-optional-croissant-exporter.md @@ -0,0 +1 @@ +When a Dataverse installation is provided with a dataverse-exporter for the croissant format, the content for JSON-LD in the header will be replaced with the croissant format. However, both JSON-LD and Croissant will still be available for download on the Dataset page. \ No newline at end of file From 2925f3bb908c3b5405717288c3a3ab731435697c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Mar 2024 18:12:09 -0400 Subject: [PATCH 0915/1112] fix UI issue with default/inherited provider in sub-collections --- .../edu/harvard/iq/dataverse/DataversePage.java | 15 ++++++++++++--- .../harvard/iq/dataverse/DvObjectContainer.java | 4 ++-- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 10dfa4a0e4f..f35682b7bd0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -1303,9 +1303,18 @@ public Set> getPidProviderOptions() { Set providerIds = PidUtil.getManagedProviderIds(); Set> options = new HashSet>(); if (providerIds.size() > 1) { - String label = defaultPidProvider.getLabel() + BundleUtil.getStringFromBundle("dataverse.default") + ": " - + defaultPidProvider.getProtocol() + ":" + defaultPidProvider.getAuthority() - + defaultPidProvider.getSeparator() + defaultPidProvider.getShoulder(); + + String label = null; + if (this.dataverse.getOwner() != null && this.dataverse.getOwner().getEffectivePidGenerator()!= null) { + PidProvider inheritedPidProvider = this.dataverse.getOwner().getEffectivePidGenerator(); + label = inheritedPidProvider.getLabel() + BundleUtil.getStringFromBundle("dataverse.inherited") + ": " + + inheritedPidProvider.getProtocol() + ":" + inheritedPidProvider.getAuthority() + + inheritedPidProvider.getSeparator() + inheritedPidProvider.getShoulder(); + } else { + label = defaultPidProvider.getLabel() + BundleUtil.getStringFromBundle("dataverse.default") + ": " + + defaultPidProvider.getProtocol() + ":" + defaultPidProvider.getAuthority() + + defaultPidProvider.getSeparator() + defaultPidProvider.getShoulder(); + } Entry option = new AbstractMap.SimpleEntry("default", label); options.add(option); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index c991c4c02d2..bfb4b3ef749 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -201,9 +201,9 @@ public void setPidGeneratorSpecs(String pidGeneratorSpecs) { } // Used in 
JSF when selecting the PidGenerator + // It only returns an id if this dvObjectContainer has PidGenerator specs set on it, otherwise it returns "default" public String getPidGeneratorId() { - PidProvider pidGenerator = getEffectivePidGenerator(); - if (pidGenerator == null) { + if (StringUtils.isBlank(getPidGeneratorSpecs())) { return "default"; } else { return getEffectivePidGenerator().getId(); From 3d2bb41ed38f54b5ddd90a8181f538ac654447f3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Mar 2024 18:12:54 -0400 Subject: [PATCH 0916/1112] adjust API to always return an id rather than "default" in some cases per the current documentation --- .../edu/harvard/iq/dataverse/api/Datasets.java | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 303b6b9adb8..2ea8e50a896 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -39,6 +39,7 @@ import edu.harvard.iq.dataverse.makedatacount.*; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import edu.harvard.iq.dataverse.metrics.MetricsUtil; +import edu.harvard.iq.dataverse.pidproviders.PidProvider; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; @@ -4575,6 +4576,12 @@ public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext cr }, getRequestUser(crc)); } + /** + * Get the PidProvider that will be used for generating new DOIs in this dataset + * + * @return - the id of the effective PID generator for the given dataset + * @throws WrappedResponse + */ @GET @AuthRequired @Path("{identifier}/pidGenerator") @@ -4588,7 +4595,12 @@ public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - String pidGeneratorId = dataset.getPidGeneratorId(); + PidProvider pidProvider = dataset.getEffectivePidGenerator(); + if(pidProvider == null) { + //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it + return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id"); + } + String pidGeneratorId = pidProvider.getId(); return ok(pidGeneratorId); } From e1f2e66661d619d5b86f60f928d138d896520a4f Mon Sep 17 00:00:00 2001 From: landreev Date: Tue, 19 Mar 2024 18:26:24 -0400 Subject: [PATCH 0917/1112] One extra phrase added to the guide clarifying that "... restart is required ..." --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 70c1e40d76b..1a3ef88a5aa 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1383,7 +1383,7 @@ Rate limiting can be configured on a tier level with tier 0 being reserved for g Superuser accounts are exempt from rate limiting. Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. Two database settings configure the rate limiting. -Note: If either of these settings exist in the database rate limiting will be enabled. 
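Both the collection-page fix and the dataset API above lean on the same resolution rule: an object with no PID generator spec of its own inherits from the nearest ancestor that has one, and otherwise falls back to the installation default. Sketched abstractly below; the Node interface is a stand-in, not Dataverse's DvObjectContainer:

    import java.util.Optional;

    // Illustrative only: resolve an "effective" setting by checking the object itself,
    // then each ancestor collection, then a system-wide default.
    public class EffectiveSettingSketch {

        interface Node<T> {
            Optional<T> localValue();   // value set directly on this collection/dataset, if any
            Node<T> owner();            // parent collection, or null at the root
        }

        static <T> T effectiveValue(Node<T> start, T systemDefault) {
            for (Node<T> n = start; n != null; n = n.owner()) {
                Optional<T> v = n.localValue();
                if (v.isPresent()) {
                    return v.get();     // the nearest explicitly configured ancestor wins
                }
            }
            return systemDefault;       // nothing set anywhere up the chain
        }
    }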
If neither setting exists rate limiting is disabled. +Note: If either of these settings exist in the database rate limiting will be enabled (note that a Payara restart is required for the setting to take effect). If neither setting exists rate limiting is disabled. - :RateLimitingDefaultCapacityTiers is the number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... A value of -1 can be used to signify no rate limit. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." From 714f9f6dd4d0aced5a4947f24fb01753335efd6f Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 20 Mar 2024 12:03:45 +0100 Subject: [PATCH 0918/1112] fixed new property name scope --- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 318816d1050..dbf0bed234b 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2352,8 +2352,8 @@ when using it to configure your core name! Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SOLR_PATH``. -dataverse.concurrency.max-async-indexes -+++++++++++++++++++++++++++++++++++++++ +dataverse.solr.concurrency.max-async-indexes +++++++++++++++++++++++++++++++++++++++++++++ Maximum number of simultaneously running asynchronous dataset index operations. From 1a3783a8a80378818f2e597cb38dd25adff844ce Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 20 Mar 2024 12:35:06 +0100 Subject: [PATCH 0919/1112] added short release note --- doc/release-notes/10381-index-after-publish.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10381-index-after-publish.md diff --git a/doc/release-notes/10381-index-after-publish.md b/doc/release-notes/10381-index-after-publish.md new file mode 100644 index 00000000000..84c84d75a28 --- /dev/null +++ b/doc/release-notes/10381-index-after-publish.md @@ -0,0 +1,3 @@ +New release adds a new microprofile setting for maximum number of simultaneously running asynchronous dataset index operations that defaults to ``4``: + +dataverse.solr.concurrency.max-async-indexes \ No newline at end of file From 7023fbfb4d6abd1356882b998941be3f72958544 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Wed, 20 Mar 2024 09:09:17 -0400 Subject: [PATCH 0920/1112] Update doc/release-notes/10382-optional-croissant-exporter.md Co-authored-by: Philip Durbin --- doc/release-notes/10382-optional-croissant-exporter.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10382-optional-croissant-exporter.md b/doc/release-notes/10382-optional-croissant-exporter.md index e5c47409a1b..e4c96115825 100644 --- a/doc/release-notes/10382-optional-croissant-exporter.md +++ b/doc/release-notes/10382-optional-croissant-exporter.md @@ -1 +1 @@ -When a Dataverse installation is provided with a dataverse-exporter for the croissant format, the content for JSON-LD in the header will be replaced with the croissant format. However, both JSON-LD and Croissant will still be available for download on the Dataset page. 
\ No newline at end of file +When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. \ No newline at end of file From 91bb468a21d9e430ab0c6940c3db09ab011da86c Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 20 Mar 2024 11:09:12 -0400 Subject: [PATCH 0921/1112] adding two specific commands CheckRateLimitForDatasetPage and CheckRateLimitForCollectionPage --- .../edu/harvard/iq/dataverse/DatasetPage.java | 9 ++++++++- .../edu/harvard/iq/dataverse/DataversePage.java | 10 +++++++++- .../impl/CheckRateLimitForCollectionPage.java | 16 ++++++++++++++++ .../impl/CheckRateLimitForDatasetPage.java | 17 +++++++++++++++++ 4 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 05325a26f3a..4daa1fbadaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -24,6 +24,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForDatasetPage; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; @@ -36,6 +37,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; import io.gdcc.spi.export.ExportException; import io.gdcc.spi.export.Exporter; import edu.harvard.iq.dataverse.ingest.IngestRequest; @@ -242,6 +244,8 @@ public enum DisplayMode { SolrClientService solrClientService; @EJB DvObjectServiceBean dvObjectService; + @EJB + CacheFactoryBean cacheFactory; @Inject DataverseRequestServiceBean dvRequestService; @Inject @@ -1930,7 +1934,10 @@ private void setIdByPersistentId() { } private String init(boolean initFull) { - + // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. 
+ if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForDatasetPage(null,null))) { + return BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(CheckRateLimitForDatasetPage.class.getSimpleName())); + } //System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes setDataverseSiteUrl(systemConfig.getDataverseSiteUrl()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 10dfa4a0e4f..4f0a3f14b99 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForCollectionPage; import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateSavedSearchCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand; @@ -31,6 +32,8 @@ import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; + +import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; import jakarta.ejb.EJB; import jakarta.faces.application.FacesMessage; import jakarta.faces.context.FacesContext; @@ -118,6 +121,8 @@ public enum LinkMode { @Inject DataverseHeaderFragment dataverseHeaderFragment; @EJB PidProviderFactoryBean pidProviderFactoryBean; + @EJB + CacheFactoryBean cacheFactory; private Dataverse dataverse = new Dataverse(); @@ -318,7 +323,10 @@ public void updateOwnerDataverse() { public String init() { //System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes - + // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. 
+ if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForCollectionPage(null,null))) { + return BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(CheckRateLimitForCollectionPage.class.getSimpleName())); + } if (this.getAlias() != null || this.getId() != null || this.getOwnerId() == null) {// view mode for a dataverse if (this.getAlias() != null) { dataverse = dataverseService.findByAlias(this.getAlias()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java new file mode 100644 index 00000000000..9dcf0428fff --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java @@ -0,0 +1,16 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +public class CheckRateLimitForCollectionPage extends AbstractVoidCommand { + public CheckRateLimitForCollectionPage(DataverseRequest aRequest, DvObject dvObject) { + super(aRequest, dvObject); + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java new file mode 100644 index 00000000000..04a27d082f4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +public class CheckRateLimitForDatasetPage extends AbstractVoidCommand { + + public CheckRateLimitForDatasetPage(DataverseRequest aRequest, DvObject dvObject) { + super(aRequest, dvObject); + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { } +} From a9b2514620a6e4f1fb1377936423c2527800400c Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 20 Mar 2024 15:54:03 -0400 Subject: [PATCH 0922/1112] add check for existing cache before creating a new one --- .../iq/dataverse/util/cache/CacheFactoryBean.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java index c2781f3f4b8..36b2b35b48f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java @@ -32,10 +32,13 @@ public class CacheFactoryBean implements java.io.Serializable { @PostConstruct public void init() { - CompleteConfiguration config = - new MutableConfiguration() - .setTypes( String.class, String.class ); - rateLimitCache = manager.createCache(RATE_LIMIT_CACHE, config); + 
rateLimitCache = manager.getCache(RATE_LIMIT_CACHE); + if (rateLimitCache == null) { + CompleteConfiguration config = + new MutableConfiguration() + .setTypes( String.class, String.class ); + rateLimitCache = manager.createCache(RATE_LIMIT_CACHE, config); + } } /** From caf2d91c234cd2a16930d1211c5e1b3b2b8e5a46 Mon Sep 17 00:00:00 2001 From: Gustavo Durand Date: Wed, 20 Mar 2024 16:19:11 -0400 Subject: [PATCH 0923/1112] removed required attr from selectone component --- src/main/webapp/metadataFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 200d2917b9a..0fab34e1e58 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -285,7 +285,7 @@
    + id="unique1" rendered="#{!dsf.datasetFieldType.allowMultiples}" filter="#{(dsf.datasetFieldType.controlledVocabularyValues.size() lt 10) ? 'false':'true'}" filterMatchMode="contains"> From b678cbbcd0c13b1e89761e48b6d89eada46b6367 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:03:07 -0400 Subject: [PATCH 0924/1112] Documentation update to java requirements Documentation update to java requirements --- doc/sphinx-guides/source/developers/classic-dev-env.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst index 6978f389e01..82e10b727ef 100755 --- a/doc/sphinx-guides/source/developers/classic-dev-env.rst +++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst @@ -37,7 +37,7 @@ Windows is gaining support through Docker as described in the :doc:`windows` sec Install Java ~~~~~~~~~~~~ -The Dataverse Software requires Java 11. +The Dataverse Software requires Java 17. We suggest downloading OpenJDK from https://adoptopenjdk.net From 1b826fb4232707d044f5353ce0ac225eaa12f667 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 21 Mar 2024 15:23:19 -0400 Subject: [PATCH 0925/1112] Fix the issue with the order of the facets and also expose this facet to all users --- .../dataverse/search/SearchServiceBean.java | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 0b93c617c1a..079e3ec35c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -187,19 +187,11 @@ public SolrQueryResponse search( SolrQuery solrQuery = new SolrQuery(); query = SearchUtil.sanitizeQuery(query); solrQuery.setQuery(query); -// SortClause foo = new SortClause("name", SolrQuery.ORDER.desc); -// if (query.equals("*") || query.equals("*:*")) { -// solrQuery.setSort(new SortClause(SearchFields.NAME_SORT, SolrQuery.ORDER.asc)); if (sortField != null) { // is it ok not to specify any sort? - there are cases where we // don't care, and it must cost some extra cycles -- L.A. solrQuery.setSort(new SortClause(sortField, sortOrder)); } -// } else { -// solrQuery.setSort(sortClause); -// } -// solrQuery.setSort(sortClause); - solrQuery.setParam("fl", "*,score"); solrQuery.setParam("qt", "/select"); @@ -222,6 +214,14 @@ public SolrQueryResponse search( } List metadataBlockFacets = new LinkedList<>(); + + /* + * We talked about this in slack on 2021-09-14, Users can see objects on draft/unpublished + * if the owner gives permissions to all users so it makes sense to expose this facet + * to all users. The request of this change started because the order of the facets were + * changed with the PR #9635 and this was unintended. 
+ */ + solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); if (addFacets) { // ----------------------------------- @@ -251,6 +251,7 @@ public SolrQueryResponse search( DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType(); solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable()); } + // Get all metadata block facets configured to be displayed metadataBlockFacets.addAll(dataverse.getMetadataBlockFacets()); } @@ -1029,11 +1030,11 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ AuthenticatedUser au = (AuthenticatedUser) user; - if (addFacets) { - // Logged in user, has publication status facet - // - solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); - } + // if (addFacets) { + // // Logged in user, has publication status facet + // // + // solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); + // } // ---------------------------------------------------- // (3) Is this a Super User? From d3ef749036e782d9de137522e255e94e565491bc Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 21 Mar 2024 15:42:14 -0400 Subject: [PATCH 0926/1112] Add release notes --- doc/release-notes/10338-expose-and-sort-publish-status-facet.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10338-expose-and-sort-publish-status-facet.md diff --git a/doc/release-notes/10338-expose-and-sort-publish-status-facet.md b/doc/release-notes/10338-expose-and-sort-publish-status-facet.md new file mode 100644 index 00000000000..a14ea62da67 --- /dev/null +++ b/doc/release-notes/10338-expose-and-sort-publish-status-facet.md @@ -0,0 +1 @@ +In version 6.1, the publication status facet location was unintentionally moved to the bottom, and it also prevented it from being visible to guest users. In version 6.2, we have restored its visibility to all users and moved it back to the top of the list. \ No newline at end of file From 77e4eac90fc9f3bd7125209df2a99577606789bf Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 21 Mar 2024 16:01:11 -0400 Subject: [PATCH 0927/1112] Change should still be inside the logic for the facets --- .../iq/dataverse/search/SearchServiceBean.java | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 079e3ec35c6..4e345a1e036 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -214,16 +214,17 @@ public SolrQueryResponse search( } List metadataBlockFacets = new LinkedList<>(); - - /* - * We talked about this in slack on 2021-09-14, Users can see objects on draft/unpublished - * if the owner gives permissions to all users so it makes sense to expose this facet - * to all users. The request of this change started because the order of the facets were - * changed with the PR #9635 and this was unintended. - */ - solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); if (addFacets) { + + /* + * We talked about this in slack on 2021-09-14, Users can see objects on draft/unpublished + * if the owner gives permissions to all users so it makes sense to expose this facet + * to all users. The request of this change started because the order of the facets were + * changed with the PR #9635 and this was unintended. 
+ */ + solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); + // ----------------------------------- // Facets to Retrieve // ----------------------------------- From 59d16b9ad7b5ee5f72a9f0b433c6a5d78a03d5d4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 21 Mar 2024 16:04:32 -0400 Subject: [PATCH 0928/1112] clarify how to increase timeout in docker demo #10409 --- doc/sphinx-guides/source/container/running/demo.rst | 8 +++++++- docker/compose/demo/compose.yml | 2 ++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 24027e677a1..2991c677618 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -59,6 +59,8 @@ Edit the ``compose.yml`` file and look for the following section. container_name: "bootstrap" image: gdcc/configbaker:alpha restart: "no" + environment: + - TIMEOUT=3m command: - bootstrap.sh - dev @@ -189,13 +191,17 @@ Windows support is experimental but we are very interested in supporting Windows Bootstrapping Did Not Complete ++++++++++++++++++++++++++++++ -In the compose file, try increasing the timeout in the bootstrap container by adding something like this: +In the compose file, try increasing the timeout for the bootstrap container: .. code-block:: bash environment: - TIMEOUT=10m +As described above, you'll want to stop containers, delete data, and start over with ``docker compose up``. To make sure the increased timeout is in effect, you can run ``docker logs bootstrap`` and look for the new value in the output: + +``Waiting for http://dataverse:8080 to become ready in max 10m.`` + Wrapping Up ----------- diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index e4bcc9778d7..8f1af3e396b 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -49,6 +49,8 @@ services: container_name: "bootstrap" image: gdcc/configbaker:alpha restart: "no" + environment: + - TIMEOUT=3m command: - bootstrap.sh - dev From 4093e18e19a6d872bcd4b24612748193f3080a1e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 21 Mar 2024 17:04:33 -0400 Subject: [PATCH 0929/1112] Fix order and patch notes --- .../10338-expose-and-sort-publish-status-facet.md | 2 +- .../iq/dataverse/search/SearchServiceBean.java | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/doc/release-notes/10338-expose-and-sort-publish-status-facet.md b/doc/release-notes/10338-expose-and-sort-publish-status-facet.md index a14ea62da67..b2362ddb2c5 100644 --- a/doc/release-notes/10338-expose-and-sort-publish-status-facet.md +++ b/doc/release-notes/10338-expose-and-sort-publish-status-facet.md @@ -1 +1 @@ -In version 6.1, the publication status facet location was unintentionally moved to the bottom, and it also prevented it from being visible to guest users. In version 6.2, we have restored its visibility to all users and moved it back to the top of the list. \ No newline at end of file +In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 4e345a1e036..c6f08151050 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -217,13 +217,7 @@ public SolrQueryResponse search( if (addFacets) { - /* - * We talked about this in slack on 2021-09-14, Users can see objects on draft/unpublished - * if the owner gives permissions to all users so it makes sense to expose this facet - * to all users. The request of this change started because the order of the facets were - * changed with the PR #9635 and this was unintended. - */ - solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); + // ----------------------------------- // Facets to Retrieve @@ -232,6 +226,13 @@ public SolrQueryResponse search( solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY); solrQuery.addFacetField(SearchFields.METADATA_SOURCE); solrQuery.addFacetField(SearchFields.PUBLICATION_YEAR); + /* + * We talked about this in slack on 2021-09-14, Users can see objects on draft/unpublished + * if the owner gives permissions to all users so it makes sense to expose this facet + * to all users. The request of this change started because the order of the facets were + * changed with the PR #9635 and this was unintended. + */ + solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); solrQuery.addFacetField(SearchFields.DATASET_LICENSE); /** * @todo when a new method on datasetFieldService is available From a8aaa11f2709959022171b2e61b552d9c1cd1d35 Mon Sep 17 00:00:00 2001 From: landreev Date: Thu, 21 Mar 2024 17:07:56 -0400 Subject: [PATCH 0930/1112] The commits that didn't make it. 
#9356 (#10407) --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 8 +++++--- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 8 +++++--- .../java/edu/harvard/iq/dataverse/NavigationWrapper.java | 5 +++++ ...e.java => CheckRateLimitForCollectionPageCommand.java} | 4 ++-- ...Page.java => CheckRateLimitForDatasetPageCommand.java} | 4 ++-- 5 files changed, 19 insertions(+), 10 deletions(-) rename src/main/java/edu/harvard/iq/dataverse/engine/command/impl/{CheckRateLimitForCollectionPage.java => CheckRateLimitForCollectionPageCommand.java} (73%) rename src/main/java/edu/harvard/iq/dataverse/engine/command/impl/{CheckRateLimitForDatasetPage.java => CheckRateLimitForDatasetPageCommand.java} (74%) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d7722f55512..2e4cb56db48 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -24,7 +24,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForDatasetPage; +import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForDatasetPageCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; @@ -252,6 +252,8 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; + @Inject + NavigationWrapper navigationWrapper; @Inject FileDownloadHelper fileDownloadHelper; @Inject @@ -1935,8 +1937,8 @@ private void setIdByPersistentId() { private String init(boolean initFull) { // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. 
- if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForDatasetPage(null,null))) { - return BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(CheckRateLimitForDatasetPage.class.getSimpleName())); + if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForDatasetPageCommand(null,null))) { + return navigationWrapper.tooManyRequests(); } //System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes setDataverseSiteUrl(systemConfig.getDataverseSiteUrl()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 4f0a3f14b99..afdff38c588 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -9,7 +9,7 @@ import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForCollectionPage; +import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForCollectionPageCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateSavedSearchCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand; @@ -118,6 +118,8 @@ public enum LinkMode { @EJB DataverseLinkingServiceBean linkingService; @Inject PermissionsWrapper permissionsWrapper; + @Inject + NavigationWrapper navigationWrapper; @Inject DataverseHeaderFragment dataverseHeaderFragment; @EJB PidProviderFactoryBean pidProviderFactoryBean; @@ -324,8 +326,8 @@ public void updateOwnerDataverse() { public String init() { //System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing. 
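The pattern being wired in across these pages is a cheap gate at the very top of init(): consult the cache-backed counter first and, if the caller is over budget, answer with HTTP 429 before any expensive page setup runs. A generic stand-in for that idea, not the project's NavigationWrapper or CacheFactoryBean:

    import jakarta.servlet.http.HttpServletResponse;
    import java.io.IOException;

    // Generic sketch: refuse expensive page work once a caller exceeds its hourly budget.
    public class RateLimitGateSketch {

        interface RateChecker {        // stand-in for a cache-backed rate checker
            boolean allow(String userId, String action);
        }

        private final RateChecker checker;

        RateLimitGateSketch(RateChecker checker) {
            this.checker = checker;
        }

        /** Returns true if the page may continue initializing; otherwise sends 429. */
        boolean checkOrReject(String userId, String action, HttpServletResponse response)
                throws IOException {
            if (checker.allow(userId, action)) {
                return true;
            }
            // 429 Too Many Requests
            response.sendError(429, "Rate limit exceeded, please retry later");
            return false;
        }
    }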
- if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForCollectionPage(null,null))) { - return BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(CheckRateLimitForCollectionPage.class.getSimpleName())); + if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForCollectionPageCommand(null,null))) { + return navigationWrapper.tooManyRequests(); } if (this.getAlias() != null || this.getId() != null || this.getOwnerId() == null) {// view mode for a dataverse if (this.getAlias() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java index 832d7ec19ef..54fb8f211a6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java @@ -16,6 +16,7 @@ import java.util.logging.Logger; import jakarta.faces.context.FacesContext; import jakarta.faces.view.ViewScoped; +import jakarta.ws.rs.core.Response.Status; import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.servlet.http.HttpServletRequest; @@ -87,6 +88,10 @@ public String notAuthorized(){ } } + public String tooManyRequests() { + return sendError(Status.TOO_MANY_REQUESTS.getStatusCode()); + } + public String notFound() { return sendError(HttpServletResponse.SC_NOT_FOUND); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPageCommand.java similarity index 73% rename from src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java rename to src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPageCommand.java index 9dcf0428fff..b23e6034c9a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForCollectionPageCommand.java @@ -6,8 +6,8 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -public class CheckRateLimitForCollectionPage extends AbstractVoidCommand { - public CheckRateLimitForCollectionPage(DataverseRequest aRequest, DvObject dvObject) { +public class CheckRateLimitForCollectionPageCommand extends AbstractVoidCommand { + public CheckRateLimitForCollectionPageCommand(DataverseRequest aRequest, DvObject dvObject) { super(aRequest, dvObject); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPageCommand.java similarity index 74% rename from src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java rename to src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPageCommand.java index 04a27d082f4..da8c1e4d8e3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetPageCommand.java @@ -6,9 +6,9 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -public class CheckRateLimitForDatasetPage extends AbstractVoidCommand { +public class 
CheckRateLimitForDatasetPageCommand extends AbstractVoidCommand { - public CheckRateLimitForDatasetPage(DataverseRequest aRequest, DvObject dvObject) { + public CheckRateLimitForDatasetPageCommand(DataverseRequest aRequest, DvObject dvObject) { super(aRequest, dvObject); } From d0bcddcead068fc99a6fc63c7ac3b3cf9c0c6c1a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 22 Mar 2024 10:15:41 -0400 Subject: [PATCH 0931/1112] add to QA checklist: make sure deploy to beta works --- doc/sphinx-guides/source/qa/qa-workflow.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/sphinx-guides/source/qa/qa-workflow.md b/doc/sphinx-guides/source/qa/qa-workflow.md index 9915fe97d98..af462653dca 100644 --- a/doc/sphinx-guides/source/qa/qa-workflow.md +++ b/doc/sphinx-guides/source/qa/qa-workflow.md @@ -98,3 +98,7 @@ 1. Delete merged branch Just a housekeeping move if the PR is from IQSS. Click the delete branch button where the merge button had been. There is no deletion for outside contributions. + +1. Ensure that deployment to beta.dataverse.org succeeded. + + Go to to keep any eye on the deployment to to make sure it succeeded. The latest commit will appear at the bottom right and . From a022e2e1ec31981130fb2485653168f9d12ab9a0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 22 Mar 2024 16:19:49 +0100 Subject: [PATCH 0932/1112] fix(ct): downgrade configbaker to Alpine 3.18 #10413 --- modules/container-configbaker/Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 9b98334d72b..91bf5a2c875 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -10,7 +10,10 @@ ARG SOLR_VERSION FROM solr:${SOLR_VERSION} AS solr # Let's build us a baker -FROM alpine:3 +# WARNING: +# Do not upgrade the tag to :3 or :3.19 until https://pkgs.alpinelinux.org/package/v3.19/main/x86_64/c-ares is at v1.26.0+! +# See https://github.com/IQSS/dataverse/issues/10413 for more information. +FROM alpine:3.18 ENV SCRIPT_DIR="/scripts" \ SECRETS_DIR="/secrets" \ From 2129555af12b244c92d2c3c82659fd7cb79fc6bd Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 22 Mar 2024 15:15:05 -0400 Subject: [PATCH 0933/1112] fixing bug with tabs in log line for MDC --- .../MakeDataCountLoggingServiceBean.java | 18 ++++++++++++------ .../MakeDataCountLoggingServiceBeanTest.java | 13 +++++++++---- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java index 5edf2fde0c3..c3bf85e699a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java @@ -46,6 +46,12 @@ public void logEntry(MakeDataCountEntry entry) { public String getLogFileName() { return "counter_"+new SimpleDateFormat("yyyy-MM-dd").format(new Timestamp(new Date().getTime()))+".log"; } + + // Sanitize the values to a safe string for the log file + static String sanitize(String in) { + // Log lines are tab delimited so tabs must be replaced. Replacing escape sequences with a space. + return in != null ? 
in.replaceAll("\\s+", " ") : null; + } public static class MakeDataCountEntry { @@ -367,7 +373,7 @@ public String getTitle() { * @param title the title to set */ public final void setTitle(String title) { - this.title = title; + this.title = sanitize(title); } /** @@ -384,7 +390,7 @@ public String getPublisher() { * @param publisher the publisher to set */ public final void setPublisher(String publisher) { - this.publisher = publisher; + this.publisher = sanitize(publisher); } /** @@ -401,7 +407,7 @@ public String getPublisherId() { * @param publisherId the publisherId to set */ public final void setPublisherId(String publisherId) { - this.publisherId = publisherId; + this.publisherId = sanitize(publisherId); } /** @@ -418,7 +424,7 @@ public String getAuthors() { * @param authors the authors to set */ public final void setAuthors(String authors) { - this.authors = authors; + this.authors = sanitize(authors); } /** @@ -452,7 +458,7 @@ public String getVersion() { * @param version the version to set */ public final void setVersion(String version) { - this.version = version; + this.version = sanitize(version); } /** @@ -469,7 +475,7 @@ public String getOtherId() { * @param otherId the otherId to set */ public void setOtherId(String otherId) { - this.otherId = otherId; + this.otherId = sanitize(otherId); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java index c1051a57db8..2a673ee4e79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java @@ -21,7 +21,6 @@ import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.MatcherAssert.assertThat; -import org.hamcrest.MatcherAssert; import org.junit.jupiter.api.Test; /** @@ -45,8 +44,8 @@ public void testMainAndFileConstructor() { GlobalId id = dataset.getGlobalId(); dataset.setGlobalId(id); dvVersion.setDataset(dataset); - dvVersion.setAuthorsStr("OneAuthor;TwoAuthor"); - dvVersion.setTitle("Title"); + dvVersion.setAuthorsStr("OneAuthor;\tTwoAuthor"); + dvVersion.setTitle("Title\tWith Tab"); dvVersion.setVersionNumber(1L); dvVersion.setReleaseTime(new Date()); @@ -64,7 +63,13 @@ public void testMainAndFileConstructor() { //lastly setting attributes we don't actually use currently in our logging/constructors, just in case entry.setUserCookieId("UserCookId"); - entry.setOtherId("OtherId"); + entry.setOtherId(null); // null pointer check for sanitize method + assertThat(entry.getOtherId(), is("-")); + entry.setOtherId("OtherId\t\r\nX"); + // escape sequences get replaced with a space in sanitize method + assertThat(entry.getOtherId(), is("OtherId X")); + // check other replacements for author list ";" becomes "|" + assertThat(entry.getAuthors(), is("OneAuthor| TwoAuthor")); //And test. 
"-" is the default assertThat(entry.getEventTime(), is(not("-"))); From 8299bac272e755dc204afe1160714f86b57c29b0 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Sat, 23 Mar 2024 16:40:52 -0400 Subject: [PATCH 0934/1112] Menu clarification for Mac/Windows/Linux Menu clarification for Mac/Windows/Linux --- doc/sphinx-guides/source/container/dev-usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index be4eda5da44..6d6291e9924 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -259,7 +259,7 @@ Hotswapping methods requires using JDWP (Debug Mode), but does not allow switchi **IMPORTANT**: This requires installation of the `Docker plugin `_. - **NOTE**: You might need to change the Docker Compose executable in your IDE settings to ``docker`` if you have no ``docker-compose`` bin (*File > Settings > Build > Docker > Tools*). + **NOTE**: You might need to change the Docker Compose executable in your IDE settings to ``docker`` if you have no ``docker-compose`` bin, Start from the ``File`` menu if you are on Linux/Windows or ``IntelliJ IDEA`` on Mac and then go to Settings > Build > Docker > Tools. (*File > Settings > Build > Docker > Tools*). .. image:: img/intellij-compose-add-new-config.png From db9cd865d9b1796d6379c0133aadc329183d83c3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 25 Mar 2024 08:38:59 +0100 Subject: [PATCH 0935/1112] docs(mail): apply suggestions from code review Thanks @pdurbin! Co-authored-by: Philip Durbin --- doc/release-notes/7424-mailsession.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index 25b1d39a471..43846b0b72d 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -7,6 +7,6 @@ At this point, no action is required if you want to keep your current configurat Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. -For more details on how to configure your the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section. +For more details on how to configure the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section. -Please note: as there have been problems with mails delivered to SPAM folders when "From" within mail envelope and mail session configuration mismatched, as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. \ No newline at end of file +Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. 
\ No newline at end of file From 83d29b122f89e3d902a8dc9b83938c4a702de1c1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 25 Mar 2024 09:08:20 +0100 Subject: [PATCH 0936/1112] feat(ct): add MTA config to demo compose #7424 --- docker/compose/demo/compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 8f1af3e396b..6c2bdcf79a4 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -14,6 +14,8 @@ services: DATAVERSE_DB_PASSWORD: secret DATAVERSE_DB_USER: dataverse DATAVERSE_FEATURE_API_BEARER_AUTH: "1" + DATAVERSE_MAIL_SYSTEM_EMAIL: "Demo Dataverse " + DATAVERSE_MAIL_MTA_HOST: "smtp" JVM_ARGS: -Ddataverse.files.storage-driver-id=file1 -Ddataverse.files.file1.type=file -Ddataverse.files.file1.label=Filesystem From b42983caec39e773f30402077c823ef480bda6a7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 25 Mar 2024 11:14:08 +0100 Subject: [PATCH 0937/1112] feat(index): add timing metrics to measure indexing load - Measure wait times for a permit, a measurement how many indexing requests are currently stuck because of high demand (so you can tune the amount of available parallel indexing threads) - Measure how long the actual indexing task runs, which might be an indication of lots of very large datasets or not enough resources for your index --- pom.xml | 5 +++ .../iq/dataverse/search/IndexServiceBean.java | 31 +++++++++++++++++-- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 8b2850e1df9..df4cadc80ce 100644 --- a/pom.xml +++ b/pom.xml @@ -179,6 +179,11 @@ microprofile-config-api provided + + org.eclipse.microprofile.metrics + microprofile-metrics-api + provided + jakarta.platform jakarta.jakartaee-api diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index cf1e58e4028..5bac9a3e804 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -48,6 +48,8 @@ import jakarta.ejb.Stateless; import jakarta.ejb.TransactionAttribute; import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; + +import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.json.JsonObject; import jakarta.persistence.EntityManager; @@ -72,6 +74,9 @@ import org.apache.tika.sax.BodyContentHandler; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; +import org.eclipse.microprofile.metrics.MetricUnits; +import org.eclipse.microprofile.metrics.Timer; +import org.eclipse.microprofile.metrics.annotation.Metric; import org.xml.sax.ContentHandler; @Stateless @@ -344,6 +349,27 @@ public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) { private static final Map INDEXING_NOW = new ConcurrentHashMap<>(); // semaphore for async indexing private static final Semaphore ASYNC_INDEX_SEMAPHORE = new Semaphore(JvmSettings.MAX_ASYNC_INDEXES.lookupOptional(Integer.class).orElse(4), true); + + @Inject + @Metric(name = "index_permit_wait_time", absolute = true, unit = MetricUnits.NANOSECONDS, + description = "Displays how long does it take to receive a permit to index a dataset") + Timer indexPermitWaitTimer; + + @Inject + @Metric(name = "index_time", absolute = true, unit = MetricUnits.NANOSECONDS, + description = "Displays how long does it take to index a dataset") + Timer indexTimer; + + /** 
+ * Try to acquire a permit from the semaphore avoiding too many parallel indexes, potentially overwhelming Solr. + * This method will time the duration waiting for the permit, allowing indexing performance to be measured. + * @throws InterruptedException + */ + private void acquirePermitFromSemaphore() throws InterruptedException { + try (var timeContext = indexPermitWaitTimer.time()) { + ASYNC_INDEX_SEMAPHORE.acquire(); + } + } // When you pass null as Dataset parameter to this method, it indicates that the indexing of the dataset with "id" has finished // Pass non-null Dataset to schedule it for indexing @@ -389,7 +415,7 @@ synchronized private static Dataset getNextToIndex(Long id, Dataset d) { @Asynchronous public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { try { - ASYNC_INDEX_SEMAPHORE.acquire(); + acquirePermitFromSemaphore(); doAyncIndexDataset(dataset, doNormalSolrDocCleanUp); } catch (InterruptedException e) { String failureLogText = "Indexing failed: interrupted. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); @@ -404,7 +430,8 @@ private void doAyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) Long id = dataset.getId(); Dataset next = getNextToIndex(id, dataset); // if there is an ongoing index job for this dataset, next is null (ongoing index job will reindex the newest version after current indexing finishes) while (next != null) { - try { + // Time context will automatically start on creation and stop when leaving the try block + try (var timeContext = indexTimer.time()) { indexDataset(next, doNormalSolrDocCleanUp); } catch (Exception e) { // catch all possible exceptions; otherwise when something unexpected happes the dataset wold remain locked and impossible to reindex String failureLogText = "Indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); From ee3dbc09fe1d1bc4206b3459eaed7af140a85509 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 25 Mar 2024 11:19:56 +0000 Subject: [PATCH 0938/1112] Added: displayOnCreate field to MetadataBlock and DatasetFieldType payloads --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 005ae2f2892..c38c1610db6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -607,6 +607,7 @@ public static JsonObjectBuilder json(MetadataBlock blk) { bld.add("id", blk.getId()); bld.add("name", blk.getName()); bld.add("displayName", blk.getDisplayName()); + bld.add("displayOnCreate", blk.isDisplayOnCreate()); JsonObjectBuilder fieldsBld = jsonObjectBuilder(); for (DatasetFieldType df : new TreeSet<>(blk.getDatasetFieldTypes())) { @@ -622,6 +623,7 @@ public static JsonObjectBuilder json(DatasetFieldType fld) { JsonObjectBuilder fieldsBld = jsonObjectBuilder(); fieldsBld.add("name", fld.getName()); fieldsBld.add("displayName", fld.getDisplayName()); + fieldsBld.add("displayOnCreate", fld.isDisplayOnCreate()); fieldsBld.add("title", fld.getTitle()); fieldsBld.add("type", fld.getFieldType().toString()); fieldsBld.add("typeClass", typeClassString(fld)); From 77789db2ace41462e405864405b6daba0c17b1cd Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 25 Mar 2024 12:39:16 +0100 Subject: [PATCH 0939/1112] reverted back to async index after publish, but now with em.flush() after index time udate --- .../command/impl/FinalizeDatasetPublicationCommand.java | 7 +------ .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 1 + 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 1277a98aa31..3b124b539c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -247,12 +247,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { logger.info("Successfully published the dataset "+readyDataset.getGlobalId().asString()); readyDataset = ctxt.em().merge(readyDataset); - - try { - ctxt.index().indexDataset(readyDataset, true); - } catch (SolrServerException | IOException e) { - throw new CommandException("Indexing failed: " + e.getMessage(), this); - } return readyDataset; } @@ -273,6 +267,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { } catch (Exception e) { logger.warning("Failure to send dataset published messages for : " + dataset.getId() + " : " + e.getMessage()); } + ctxt.index().asyncIndexDataset(dataset, true); //re-indexing dataverses that have additional subjects if (!dataversesToIndex.isEmpty()){ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index cf1e58e4028..a5ea46e45b9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1503,6 +1503,7 @@ private void updateLastIndexedTimeInNewTransaction(Long id) { DvObject dvObjectToModify = em.find(DvObject.class, id); dvObjectToModify.setIndexTime(new Timestamp(new Date().getTime())); dvObjectToModify = em.merge(dvObjectToModify); + em.flush(); } /** From 8945ec86acce638222a2c0b7eaa77ede0a9be3f5 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 25 Mar 2024 13:29:02 +0100 Subject: [PATCH 0940/1112] moved indexing after last dataset merge to assure indextime is not overwritten --- .../command/impl/FinalizeDatasetPublicationCommand.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 3b124b539c2..287e877f6e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -267,7 +267,6 @@ public boolean onSuccess(CommandContext ctxt, Object r) { } catch (Exception e) { logger.warning("Failure to send dataset published messages for : " + dataset.getId() + " : " + e.getMessage()); } - ctxt.index().asyncIndexDataset(dataset, true); //re-indexing dataverses that have additional subjects if (!dataversesToIndex.isEmpty()){ @@ -297,7 +296,8 @@ public boolean onSuccess(CommandContext ctxt, Object r) { logger.log(Level.WARNING, "Finalization: exception caught while exporting: "+ex.getMessage(), ex); // ... but it is important to only update the export time stamp if the // export was indeed successful. - } + } + ctxt.index().asyncIndexDataset(dataset, true); return retVal; } From 053ebdb1e0805de73d0c41cdb8d9fd4b9c25abe3 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 25 Mar 2024 13:52:40 +0100 Subject: [PATCH 0941/1112] metric fix --- .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index eae8e470ddc..cf0b177df95 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -446,7 +446,7 @@ private void doAyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) public void asyncIndexDatasetList(List datasets, boolean doNormalSolrDocCleanUp) { for(Dataset dataset : datasets) { try { - ASYNC_INDEX_SEMAPHORE.acquire(); + acquirePermitFromSemaphore(); doAyncIndexDataset(dataset, true); } catch (InterruptedException e) { String failureLogText = "Indexing failed: interrupted. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); From 744113d91461e79c37a997fdbea22d4ab0a7eb40 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Mon, 25 Mar 2024 09:37:34 -0400 Subject: [PATCH 0942/1112] Update doc/sphinx-guides/source/container/dev-usage.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/container/dev-usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 6d6291e9924..a28757165c5 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -259,7 +259,7 @@ Hotswapping methods requires using JDWP (Debug Mode), but does not allow switchi **IMPORTANT**: This requires installation of the `Docker plugin `_. - **NOTE**: You might need to change the Docker Compose executable in your IDE settings to ``docker`` if you have no ``docker-compose`` bin, Start from the ``File`` menu if you are on Linux/Windows or ``IntelliJ IDEA`` on Mac and then go to Settings > Build > Docker > Tools. (*File > Settings > Build > Docker > Tools*). + **NOTE**: You might need to change the Docker Compose executable in your IDE settings to ``docker`` if you have no ``docker-compose`` binary. Start from the ``File`` menu if you are on Linux/Windows or ``IntelliJ IDEA`` on Mac and then go to Settings > Build > Docker > Tools. .. image:: img/intellij-compose-add-new-config.png From a2c50b62e430cc829a4aeccaa5bdfc9baf5213e2 Mon Sep 17 00:00:00 2001 From: Benedikt Meier Date: Mon, 25 Mar 2024 15:29:48 +0100 Subject: [PATCH 0943/1112] log file with a different instance root directory add two files #10373 --- .../edu/harvard/iq/dataverse/authorization/AuthFilter.java | 4 +--- .../iq/dataverse/harvest/client/HarvesterServiceBean.java | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java index a2cf3082ae7..c93a1496c17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java @@ -29,9 +29,7 @@ public void init(FilterConfig filterConfig) throws ServletException { logger.info(AuthFilter.class.getName() + "initialized. filterConfig.getServletContext().getServerInfo(): " + filterConfig.getServletContext().getServerInfo()); try { - String glassfishLogsDirectory = "logs"; - - FileHandler logFile = new FileHandler(".." 
+ File.separator + glassfishLogsDirectory + File.separator + "authfilter.log"); + FileHandler logFile = new FileHandler( System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "authfilter.log"); SimpleFormatter formatterTxt = new SimpleFormatter(); logFile.setFormatter(formatterTxt); logger.addHandler(logFile); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java index 20884e3360c..e0b5c2dfbfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java @@ -88,7 +88,6 @@ public class HarvesterServiceBean { public static final String HARVEST_RESULT_FAILED="failed"; public static final String DATAVERSE_PROPRIETARY_METADATA_FORMAT="dataverse_json"; public static final String DATAVERSE_PROPRIETARY_METADATA_API="/api/datasets/export?exporter="+DATAVERSE_PROPRIETARY_METADATA_FORMAT+"&persistentId="; - public static final String DATAVERSE_HARVEST_STOP_FILE="../logs/stopharvest_"; public HarvesterServiceBean() { @@ -399,7 +398,7 @@ private void deleteHarvestedDatasetIfExists(String persistentIdentifier, Dataver private boolean checkIfStoppingJob(HarvestingClient harvestingClient) { Long pid = ProcessHandle.current().pid(); - String stopFileName = DATAVERSE_HARVEST_STOP_FILE + harvestingClient.getName() + "." + pid; + String stopFileName = System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "stopharvest_" + harvestingClient.getName() + "." + pid; Path stopFilePath = Paths.get(stopFileName); if (Files.exists(stopFilePath)) { From ac74b23a4e316e6f142e82582165d35f720604ed Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 25 Mar 2024 14:03:10 -0400 Subject: [PATCH 0944/1112] removed outdated "problems sending email" section #9939 --- .../source/installation/installation-main.rst | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 5f44ef1e348..9f935db6510 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -141,19 +141,6 @@ Got ERR_ADDRESS_UNREACHABLE While Navigating on Interface or API Calls If you are receiving an ``ERR_ADDRESS_UNREACHABLE`` while navigating the GUI or making an API call, make sure the ``siteUrl`` JVM option is defined. For details on how to set ``siteUrl``, please refer to :ref:`dataverse.siteUrl` from the :doc:`config` section. For context on why setting this option is necessary, refer to :ref:`dataverse.fqdn` from the :doc:`config` section. -Problems Sending Email -^^^^^^^^^^^^^^^^^^^^^^ - -If your Dataverse installation is not sending system emails, you may need to provide authentication for your mail host. First, double check the SMTP server being used with this Payara asadmin command: - -``./asadmin get server.resources.mail-resource.mail/notifyMailSession.host`` - -This should return the DNS of the mail host you configured during or after installation. mail/notifyMailSession is the JavaMail Session that's used to send emails to users. 
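As a side note on the instance-root change above (the auth filter log and the harvest stop file are now resolved against Payara's com.sun.aas.instanceRoot instead of a relative ../logs path), here is a minimal, self-contained sketch of that path construction. The property name and the logs/authfilter.log and stopharvest_<client>.<pid> layout come from the diff; the fallback value, the "myClient" client name, and the demo class name are assumptions so the sketch can run outside an application server.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class InstanceRootPathsDemo {

    // Resolve the server instance root from the property Payara sets; fall back to the
    // current working directory so the demo also runs outside an application server.
    static Path instanceRoot() {
        return Paths.get(System.getProperty("com.sun.aas.instanceRoot", "."));
    }

    public static void main(String[] args) {
        Path logFile = instanceRoot().resolve("logs").resolve("authfilter.log");

        long pid = ProcessHandle.current().pid();
        // "myClient" is a placeholder harvesting client name for this demo.
        Path stopFile = instanceRoot().resolve("logs").resolve("stopharvest_myClient." + pid);

        System.out.println("auth filter log would be written to: " + logFile);
        System.out.println("harvest stop file checked at:        " + stopFile);
        System.out.println("stop file present: " + Files.exists(stopFile));
    }
}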
- -If the command returns a host you don't want to use, you can modify your notifyMailSession with the Payara ``asadmin set`` command with necessary options (`click here for the manual page `_), or via the admin console at http://localhost:4848 with your domain running. - -If your mail host requires a username/password for access, continue to the next section. - Mail Host Configuration & Authentication ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 909244b63e167daf5e065b25d6ecced241ec9d42 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 25 Mar 2024 14:55:57 -0400 Subject: [PATCH 0945/1112] Missing env var --- .../source/container/dev-usage.rst | 2 +- .../container/img/intellij-compose-setup.png | Bin 45986 -> 52130 bytes 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index be4eda5da44..9f2b2648165 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -263,7 +263,7 @@ Hotswapping methods requires using JDWP (Debug Mode), but does not allow switchi .. image:: img/intellij-compose-add-new-config.png - Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variable ``SKIP_DEPLOY=1``, and optionally select the services to start. + Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variables ``SKIP_DEPLOY=1`` and ``POSTGRES_VERSION=16``, optionally select the services to start. You might also want to change other options like attaching to containers to view the logs within the "Services" tab. .. image:: img/intellij-compose-setup.png diff --git a/doc/sphinx-guides/source/container/img/intellij-compose-setup.png b/doc/sphinx-guides/source/container/img/intellij-compose-setup.png index 42c2accf2b459b0502d2fb42be5058e07b0e9376..0ab73e125b2897176750ac81baa8c8a6223d3aa2 100644 GIT binary patch literal 52130 zcmbSzWmsH6wk-q;uE8a^ySuvuCpa_~+}#og?(PJ4cXxMf+}+*fk!0r1Jh^wi@BQeH z=A5oNwM%yGz1FG|^hHh_9tIl*1Ox zKoELiVy^_RiGB#p0 zX1_>*r`WWgx|Me(n`t)~g8e_9vl1o9+bP6luaKMQCyS|TP&=NzG=8S4+Kl@};04zyAQi&DH*-D{4 zQx<}P6m6}YdhXKNu!4`(L@L5-I@_dO zQT^EuX%mig_!Z`N!#)!J5c`1g_nXLsgaFZ>YknWGlZ4;DZkC`CWOTUhId>V<&X~F9 zcMslLrVNaYarJVdZk7{gfZ0-!l9NU!B0&kLIw2g0Gs{z?Ekcl(!Qs*4OIRG@qMy~q zx$^}G89ouhg)aA%7f(&fQNWsDvigT(AZB{z!0emqKs^N1RT;1F!L)XcE$a#g@k9{5 z2+cn~aWPTrUQ6w4ImSggyFlTvoe;0#g)YMoj;Wk*=Tv;#;)1!nGOP7ec7UTyYpycf zXi-XKCVb_(IFOPY+VYa{PDa*0CID_Ng)KZs*D~oJWPjv7qa#{xNeP9Vh9pH4qA2EISsWgkoSgFXqesc=91m57a@ajoSKhL z__cLIO)%w5wU48LUY3YB@9p-V841LBgB-@I${sr4Dr75Naejd|Urfm)w%#k_e(O62 zJqhGAq59#N-*>w`*Dq$Tzo~1_Pn~Jmco@W<_`KoHGfxgz`@DBR(w;!n)onV}Kq!mN zo+DaKmP0Vwb9A$L!v1Y)LT#nWdqy5@;{GL~@KwG)lr+u0&Y zO9*>I6nGS31~n|x#6TGf2_bdbHrt`N@$I|UQGSB91dLrV`{C*Osmd;AEmj~fgPcK> zY?^_vWXMO2RXlp80(5e2`tTGhP1oMKf^bM<=_kTr*Pc=6nBs zx-6Nx9x`#U5*Y@pO*UafEQQsx2@}cfJ|EeoVyoC9Y?5G^@?5*VT0c{8`Ed`>vrTRe z?YUTQQY>l;4n4ax?e?BHZSWqLSON1aRHEq#x=ROfT<>}l?k#-bTYq;rUxHY6RGut0 zpn8Flva<4RUEl8N^5txA2P4vH9f_2b&4P_vHif0nuxLfEWO1Bu@}%TpTR4=Rzk}Bk zf2(xo0av$-3>)4x($@L2S7^zXF4JzS1~HNhKrNh*yGj zjsEf!H#R^S*TN-Zk8EtG0CEn!+r?$M)S(g`Q18FBe9mdzdPQC6ga=34czEVKZTHaJ z!u-a9NwvE;Jf}>K!6qk?o48FitVg?=j>vfM=*e@n4|i)0V$2oh6;Zj$?y7f*fMQ0| zOilS!FY5^+3irYF!?;&%2IQ=zx7K&?HoC_4ru{{FXjYc=ifsOiMc9n0LJ=smu0@2s zNn7?91DanVwpBahX$3R-9?3dgW7gzXn(b4nsyc-hk}!INC2k zL{%=FN;=*V{D>T&%%cIkozv{Ky>XVrOoB#N*9rG$E!R!kQXdKBLL0$+=^U87$MBmF 
[base85-encoded GIT binary patch data for doc/sphinx-guides/source/container/img/intellij-compose-setup.png omitted (new screenshot literal 52130 bytes replacing the previous literal 45986 bytes)]
zHKnoJU^=u5WDlJ^@Cr&q?PY8&wC*6NK(~fr88*8teS( ze==}KNa#`)zFM%?>A}tv`Qi*eXqa4u9VTxtuA+w4>GXVL0kU};+DUZSnKq!)y`Q?h zDZeLJ{P|EtkA`imYL$$R!M3R$UGu$Orq38r!;m&wke^8uoLhbO8gQbgI?n$5va z?L~c|dzhlvu2`(wk;<8~?a*|NV%0T3J*el@dFxX<58>DrzMsn-no67ra;UUqCfr+( zUoNiVq$Mx;9u$v0U-Y6rwjvU%R5s^#PyS|I1-{UiiPPe_Dw!ilv=)I|Bz>QgPwNH- z;h?{>UZf8!(OcnA)6h^>RvymrSw_c5onv5Rpfz;!p6Nj3$*&e z09F+-@3JoDcbdJ*^IM>pk?}IKRlQH>xQ>!}5BMRBnsxvWOLCyV@A|h+1vN9`*1C2f zRW@+i+YkceL0fdKzo&XmRn5B=Aiw~=&^1wy<-3)KHAW!G>^X9K!=9>Y7Bg7$`k}b% zia%W)_khP~c*8z__Da*!tcRINeXJ#NJRNbuHZe+uyG;1l5FzIqfYrnn6KUfK#k(C$ z!b{O3x{Z&~X@AU)3{nqRe8y00od0chB5bd_Y4Ymlkhhc#onIjACqMm&PQZ+VC9Jh6 zi86E-s6}$aL^K$RO3V#!yhF8`T+cuzc-wGx5~q~S#5Y4jc+i6N!RTN%>gWwTGetc_ zpsSd8mG}6ovp|*U#Q=eFIBDZo?z$Ym=-hiAFj4{u{%%oCK%cgfv~9x3;IiVyxq(ZD zD$V<9vkm4%@y!kdJn(Yc?H=~iKcpvw2!=%~F%MCqm@(%4s6E4z#X=E9S6?b$?ffW8 zR)d0z(MYp#pJDu&Wg)1o65nDI(EjIl*osE5wKTCl{hpx_xjuT7QyCey-R{2T=G~-@ zhH@JLJ4uKoO^y2e8`QjHfBaG_X*RsH){ZkZZAG`$%2<<&`zFKn<7pEFJw-B^Do)D- zhEZpz`q<@ge={6)eGBf(%R<0e394~|G?S)^meN8Vp_&Pubht8Cgfn%`6DAaY$gkTQ zF-l4_vq{QH(ABL~Ml<7v(|6>3YLc!w6i590SY|EmSlUN@T`zSVjdoW~;G6QE?9&nF zAU}7oi?^RRNWvwI2WkEFW#xxJKKtc5-yIhB^MX~o03NKfeg%8kSzp8P4>LQDuU`%& zIz1Wt7ISQZd8L1OSt52#Cf4}+<~o{PhBpl=Yv?N& zDX(F0CJsx9v}5@978sY8pQx!n6x)hRNEn7kl6K|&;P5laAdCb1a_KPU)qym7p7wJp z3QJoBJ+5ad=e6We>v>MBb5$Rlxx{7CkHT@ez^%OwYYI@Tcc^xjo)+A8i@|q2;-mYz z9JtELj8Fo$3QbOw0#KW zy;)q(l~|0JIF;Xh(+$HcN7P_6JPoqJ`Spr+XX@FVH1XpbqOZ3-oPx>F;NlA00q=C9 zD=t7cS2!37yNxZ!&s(S?h-Ow_Xd}8hC$#0f5uPTR0O$S&Zq@O1Qto9NOkb5}QIR0> zHS(!tE7XTeo&cNwlw0B`Wo|=~E2qQu7symrZXt?M@lz_n=g=a6fTk=GC;n@gn(gxS^&}&b|I7VDjqZp4Yf=>(*uyFDkP|5jy z@wtXE)>Nt&I7Sciwr6tYigGX4lJknPrMj2XxcW@iwZt~3u;Y12gt^{%YPP`jfy35%&_CA+=6<5p->pf}#6BMC;Na^# zoE&@S$Y~wN6C@f>Yt$QWR9oPfXDmrg3%hWMA?xRB{)w++Q$+$|1O|7JxKjvJA^QhiNr2!Chmq~3x1j27M# zq0iLai2)YW%+S-~n?k4enI{PI!7t~z7Oi@GK~rB@+SnL^Cp%(jXTX$Yx=7Xg3hpbo zaH;~q&(#LJ(7tem5>g7Dh~C~}c`REEAqdeGjzF2(kctc&U> z*>OT_hzpEs{cfqZCF4&k%R9^Cv9q}s8`rsGVng|wOpS$D;#lGpvlAfrKE51nwXj9) zDGvf$gHd1wh4biC%-5vkaAqH>4vgnTSxYkk$%6n;3Ph~>On)1POi#&oaOOaMCnNMlbMZnz;uo!#S)!9Q-c%gc<8yf!;+PmJ z1w3y5?V{PAG&JUyDVfcNmax~Iur3YjOMy1UQIuZmqN~5>?N1)rB-Y>9RP9HMXGUqM zb)I;69eASGr(Lv_%<~mD#AHTL4_9W#7)J@d-yX}J3@R?+V@J(d9{F#qcVpn~7_Gb& zUj`hi7vmb3Im`b*^{^2R>nYTGeSlxh_UHAg_kcNtUnP=f)H?DWUp3V1>hZ(#%Nig5o>`7APpm;omGN5f+B{OV z)%Ud^t2c}($IXToJBof!pL;{rV*%YZL^>PQ7@$P+4($BkI+9A$ZL`+X!b|%m;&n?E zN;Mt$GA(Td6=OxWb&ZmemRP{Nh_5h;=K~}d6)ai54tHE)BsoLMRfG&pMLDn_czVqL zzT&-r9m+T^Tn@~maSDnG0olhY#-!o#3S=mPsi355{B`}rbc2a z5B4eaDedhVitjGRCWoD}WAid_bxdr2BP+^AQ4!<}3xOb~+WV5jueG~K&L>~ecpQH? 
zZNWwX;ES$9m$gD}Dg7?l?mS@X2>qc`{nmoV&nb2F$G@%V*V!R7tw5H;`ZG|(q$R*d zJPZ6dD-e-klhqSROp{Uw&o{MUwpN`0Hl_6%ckQ@gBv?capq6y!u(!Y54C#8`M)NT< zAlw1X)!cixwLkURtTk~uh^y@!^nER|=#)nSe7HZ0hk6EIK z2aDRDU+gzNXYqy(5KUT)vGz%ER-1}ne$hB>3wh-qr(0dh>@2hKcypoEygd%ty8m!< zoI3_;!SgwWn(YS{%)pOhQ~udg*lOEaZ3KlYI!A!)yg7Di3;I021wZ3PFo82j{#*j$ z}UA|eTnY%Z+NSJ^~d>ly%tjhU= zi~dXSo3|+oR#nwXkbn<9Ap6wtF3``+s1t6EZIF-C-`@9_Be>*y*mf`Ev(1%`EiZ+4 z8X^m598|^rx@MK@=(hQ**57Qz2mU-UykQa?)5o!!laXo6>sv-*7q{mJPnpOX^5Xc) z(7<_8m%;C9s7Vrjn1L#)6Wfd(&e~5#4AnoG_7}lQrN5Jv#~Hj>F_9-?T9^RBp)8Zk z#>@r<_gO@-Rx7FppumqFB{*-$+PF!(zfocqyU%d$&ydoQ3Ro; zcos9C6&QGIt*oh0Gk-VWOs>c#x|n>k>nrEE!0TL`8!D*cgCYf7YO9Cq zcJ#sB*TEHbf-}csNNa^x&@RH}a!pk7S!m1Awx{izF;>jaYF!?4WaW|V?#f%Lh=}r| zsdW1KGwv)W&e*=z3+~R9odFXV7hmu>GKr`cxa*wUQC|Q&|4>Yk!#c}r>Z!OLe}v@O zVfS;1#gE$htkn->P+y!EDhKbT4t0e9cU9h*5zkt$j;o__*E0w0 zF66)@f^P=Az>pB|L}hR`Az~I9h&`nZGz{m9(urvTOL%rEjI?_u=tG$ZDtS550f#t+ z>CEt{7RMm|L8U!Di2Xaog2DUktR4!EaUIbZ=$OvQ$eZy zrgwvT<7sEU##y7cXgP5i%*&t`#-Xu%u~6BE`o6jag(TFBz;4x--@KuH!l3H42F`}X zRY8c&OAjhq;Aa||T4`|U+l$_O#~(Y1sSh6BYf>%VeI@N3Em*1>UYY52>!6~FsFSs| zAX50<)j>(duR_Z$ip<^WLCqd^QYCU$|It^o?+qr4e zQm4bES=Ws~V*rn*OW_l5Zw|P~06%4z_j8(8jA&xI4-fKb_1{Q&(RkS~?tC_fEY7!Z zvYskg{O#dR2jU!38q}K!hJj>i+&;P7N4b6?;ZOsxw0mpTO?Y#dOwyUJ5pl3Na2#_; zupC7Qm;%4Z{_s_;^Q>J?UHpc!e@`}nEuSg8)8Z_pOWIY0s}LG_P`{vsi+52*$Y*Wx zajvze5M%jv-|IINt|ZG@0b=KlrioP7mUK>MtK>!KD_h_%FLXb@Ed{87vduD_?<{Ia zDv;bE5>SU3y>%wcqbH&`oUL&2u!gpM;<25>s)aiF!Q6vWdwjl@XC4Y|cLMfprEt?U z{xM5usZ5Wq$pkC4(*bjys=xg5%$w4Y_ZyboHu~N53b}`{&YDFUnAxS)2xA=0KB=kc z%E-C0eY(`xuEKmb0*+i+_&5&f<-L>LDWgmO`^m3^IId9~HBt=c!lu1nq=60QSYO8h^ zlN(zrc!?L`N_k-t1%-aLe_Oe$z2EU}skw8A9?9OgTxNGyOKikWpABCqvQ57r;jDE0PaYn%#TpmxFF>1A7%9f z9044eU1s7=FAlUr#RHX_!V^dQnFyIj^yNDvn}t~G`)ANOyKA&rTsP)4oW8x04#0f( zfJh6cfr-~9R$9G-VdfGxMzj_L7!sf&c>9of1CZC{UFZ@n5ZiVKu?*#`= zXbFo10ZsW)~p7HE;W9sB6hwIgxsq;&`{dr!b(yy zZ7H34*%)y)-Z|nsF@3-7gAF)MKu7Bq*r?6ZPG_JB9 zYJ+rbO2*7629jjd?Snj*T+hU*2;rr0N-h3PzS+Y>@@Ho11^6 zXe!MooiN`aI?i}bIpT^;z7crQYCTx&7MF|*U|tAV){dyEx&Qo(gZ{ysm8$~RZ(oRB zJe0fFzmCOlv-Ga*@qDZTp2Y(_3Dx-X(M2f=F;p2YeqaRif?r&93wl(|rC6IRb+zjv z0)qx4oLdy}s3$sp$K5*Y^s;3&Zeu>ZB5`(O-`wk88#+v_+>~rJq^c}^x5&0^9*u-~%cuuk zYa}a8Evel?4Q&osjox>icc#C;C<0+H1Va0}cc{2#asw%v@}b-iLl<5}7cG49CvC2e zI=N3TRiGr`N(UjqL>J0R_Fw*{( zXy98NJ(K!JXS=P@!+cUJ;vkrkywADhk$#`#1j-;0xDb=!2??-bZuU3B8uLx2QS29G z)NFV%UN@sHI_)nqn$dcCR{0P?AV>;MT5Rx7QhzvgCo0nmsjRRV$F@7r%b+*#4KwX) zFK=2V(OoBsU+85L{XgbCh>H(3?ho7kmxY&GhRt_LJ^LoT?V<6KRJHpbYCoUzqjyN| zUt0b*tCY}8*jgK4WXDiZ$+M*sFgI%A7yS4A9JyuGe>8qQ_CG1!JEoU_K>gRhSY*7n z)N`eN2Md#QmOmTYg`al$-##swor6DWVPFuXktrxu6q;BEqR7w2JuUz1HD!ji0~m!- zm_K_sb?n;N#fAXXEZ-CRUJe^kkY|N!9}5j!gp*Pu`J=|8EEHBO8-_M)^M|Z*L79B|%C}UB!eQ zO?7o-|92{!0;bpyQfH;@&%m=3EH;h7(8Y-b1j5snv~C_r(FmK@>_==lA|kH(1h|fV zo?LRJX3fB&Q_;|LA76Gv{u?k1O;}9q<1lsAQkMf@lLTxV4K+MGhMbWTx~+r877Qh( zfGTYqg^aS~TY<7x98p>kVR|iOnjFQ5of&G>-vtjti&@hT9yW!QtHwaXlhpS`R98<9 zC5ZArFPBqSC-;2d?(ZLjWUv>4NU$aZQZh;ZduvC(8Cw)MYXU5Xl@qYNE2p_>y+Et| zkMi4O0J7?+kJd*VR2#OM`)7SqNGWiL9^D-rgyeZJg(_goP2_;irJ7-fSDKXC0^k$}M`V*B+@KU7{-VH`vTc)zh$fTW;zq35mnSxj4{ zkH;w7z8}>ZbeM4ARo;%E-=Us|!Nbo*dOM2zmMlKJAu;@!)#jeD5r20lmv2y~NWu9) zk!FM0fGptpk!T*U?sto~latlk($QtSGLq4D`9B%hQXh}%|M0y32vXSQM>4y)gMAHEg~37ftNJp!-l|69k=Gy;}RWV=RN72 z2YA#?&IK;O+h98e2TwV(p~J#+mz5FgpOkFu^jSv7i)5zC&uy*a;E_M+Y zjy2J{w7In0p3zEkrm7l+Nvym=0$%}yZAYgvJu`1WK35ZxuFe}G(0P@%16_w4hFW@o zqy1wRPMxoLs@{hkA^WFAgVM)wq1vC|`>!VHcgwK&yNkby-WZ(ZR&LYIX0$ExLV16H#al#=fFY%v1I1 z#4h*BCD{MU_=>%bWPDm=u79CC8NTwT&Nli$KI$qN#;of!tc2Tp(b8YE%@~v)GJi8$|Bdlsy0!o^n-Z^9Vvgf@{KgShv9KI|0aVYieN8Qm4dpXk{**BwC 
zeaeG2CQ!BP^rTO~a2H{>ki_2h}Y+~+(wahrH;?Tf+E>yo9eHry4-KO zDz8{>Hu^xD!dQ_i;PDgZ`~BH?WjgCo5fP&ZG?~ZGoO`y!M#RlZmk_6kpwVX9&Y%rJ z&ukn6hk}A*$BDTrLr^PC0UB!9V9#SwX7gF?YrUr*uQ%8#ulDmSqQM22QcH}1rh;kj z|ISzH-HR^eSa1p|_!T_Hk}_Tlf7zm1HYmWPxB0@PGHwMZ;KZE<;Pwmc$e53Bgc5XT z4_yHRRDsqI)WrsvLX6V>lMA2|-rED(XmZ{wb)6Ad8}0CLzE)OKp}AlcD^X?yl~gO)Ddd-ek23ar@5=%ZCj>&uy2f6vd3~>6h(5f z=plF>MT=!8&ekE49eS${pXsH8Z@Aa+c5Hd^Zb9Ur*NQQkQR4ASi#F5pyTVd5iG{vN z5nWza#$0A#DpHo&BXsrNK#)W1lQMROFItgRGOp@Jw@}&S)Ev=?Q_RnY#;gm#wIDAh zOB8u)QPYnFR^Rw{LZ|Ac%Ow*N4V;f(mk-G3enqOzbuK@*mSWer)Q|GLJ*sZ2lKg8J z@J&WWM?C!|PJZQmEJcubzj$#bG!TIzv;N9Bfn#et+s?l00miq~`1TXYrrOkapRJuX zl8QmMyNjZ0pwW=E82;NQfoeNA?eewB3xRa4r`;b1fge1;6^HZf&P&Tzp_%!=zPYb_ zeOnw_puaD~eLe+xRHk< zy7P8DkR(2jnfS2-_)7G=iJJ80f-NzGByI0yv3*)cV(gZiSyO+Eu_icSxqTJ^pTnMU z@JJ{Do_%9B)VcLw(XQ|BnF{Be@t%Z8*?aO@%n@DAu`b#VPGzlCC6nCw)+DnIYj-B!mhUR^rExC+y`rb#3JI&bmX;&>1SPw_NHAL+C2RRUg*`mAd$?B{7VWq)wB>y; zhh+NBVml^Pu*hIi@nOSx|MQ^k(zK@FpvLpww?jT>;qr4aYjriXrQydrb&x62?p?U3 zv`SCiF+js>a`&pM{?<-28Z_sG#B3+GIC!l7Zv_*r0}=`eqe?XtI=X?m1ZU@>=+3sf zb@XPSNV^t`nYyu-1%>ocxv2cO`CPuC%r4D5tCime!AQ;5uO(?dMS;!*%)1Pm(O3ZF z$NU=oJ9>Swt>#N~zxu3#3>>YA_w(4v-v`83jLqfv;h|Z4E^R0om+^-7I0{K%am}k^S7)n_R*5osZ}OI_SC3tZ;RcjcUQu$6`H8kL5r$-Cbp)!;IIqFqFHL_q12uPWb~?_tVKCRjUxm|V2#?EKuZUCc z2#y6{3ny1ixr>p&qmN(YiBE+f2h87MU@Tx_dWd@$iI`?9?1D?VPEd6GmCkWkxnPTi zHviIlj9u4kfbC6s7WXUyH3A~>ua_DGL-FJ9`XgTv{+It9#Fgzsd(|~PE_m<;Fiba~bi@Baf<`r|2G zlKzhVzd?-un?wG;pppL%A09yql zB(`M!UqILYG4)AG=r5ij4*LJ$Q2{1mIz4EHw|&9p(&X)@m7RiGOC-PNN1WW}dV=NBnFOB=T0_rj96q$L;y+oGm<1TS}JyU z9ny?hbc&rOn1bpkEYfc&r94{ z>oq&hu=06%Lc4zZhZy^r$#87Cq+JQPbC#e8I3Y#%bq;+&1obkJ1zx{f9!Q{PK=mkM zrT!q{Yp!WFu;eg(q$y^544jQs#f8o*e~eg; zcDq57L)ci8#Lw@qW-Wyy4#(eK2NmLkE7;2)FHnm?g_GPkRaqu3WCT&^)%{Yq2F9sf zpFAW3%;|d(^C(!3hPCDR&TU)2Uc*V7gdjIqFn6rk!U!uIjcI)s^ILp$7f)?4306>0lw3MZi_cq542LLDcS z9TcccwOu)Jt$l+NW;PB}5Z)i_*~+yyn0z(UnLg21Ga*5lc504jR51hgA1^4uI~^p` z=qgy8(nK&ob*u-E8r`qNbxDjAU{9yZTi&rSHT2q26_!_4x|12O51Q1vl1ropFq1&1 zcEAG-9*%-me7Vz9nQ>^Gl9Exi^dcq#sCi%=%n%hccT!2;(I^!S9lRWr2%}9BYV>@8 zvYM83B|D;z6S43Fdc=f(54v!Wb$8z4)}2QNMx0fM2WmlG*+njH5q1sPU>--iJ?a86 zG}*0(?C_*ZUVY@}-ld$aooeEAv*-F2bcEpFovBQ|bMEdw9a*+|-XXDVDV^1{?kse!?(cmn$r>hRoM&}q|b;)tAQh}Mgw+}af#`udFkQ&p`_yoLaOr3>&w!U zVu?+YTGXoI`L3Pm%q(hKTR&wM5@=`XEHs%YALW>hy zas{pQ$t*@N7yO;L>FaMkXt*`8d!JhSH|mPzmwDFRHk%Ou7Bo%$#Y)$fGd^B+Kfn%h z(DbH=sOI4~J&B90TTJb7=LUZc&B2p9#FBt~l+bWx;YGOZkn{D{e4$c%d^;HKs0fM3 z6SE|n!{4vt#<=6gaW2inn18^Oz%}ojP%$e%Ohj`t2ZJP$XXcl6zmN`(L`r;{3sTW* z_C=WKFY@M}!wkheM_e_@JmJz|k`A<`$8DLYKQd&f86x`*ayvc+=-OK04%m{j(I8m-KKM<^^ku-Y z(&Bpmd+4FgctVg$5l31r@X$~pB4xEdWqg^7<#qTdmJlQq4!yYEf-asTu3~%(`}j)c z_9Lfr18tr)GnE$#W$%G>O&Cge?HX04Bp zqH%kj7VEx@Z!Z70mtX3xN6UjvblbW-+3{mNlZ+O(LyH$rsc(JRUui%RAjS@M8;U>$ zqTY*mi}_-N_i6ugp;Kr6xMvGXzDANHy=4W&A+l$ycs({*CA|-V8sG#2$ zYwHwt8+C!tb~=tW{@z%@-blu^^;d4?Yvj$V!~JfAVroGs4%4Y8T5P%9%_Sk@<~z}g z22}_$)&cRH`hFOhi&2-(%j}14LF*#=&r?5BqXHaOgKvFq#&t{ zjX{5bRuFamPxH?L`Hupm>@mN&IK{;=gn=J}(O$>cdb2OdCk1cG6cyhxP8Ap-X}e4= zeKidmC0bV9QdQW-7f+Hyh_n8&J2WTg4jFK3k|yk>*lkgRUv3au#BWR5sFcI8N05}x zF(a=&aLs*;qJSK*9Z*V->~m<*NY~^L;rsdcXJ(qf zLf@i?&%U0x!wnh&jG!%N?$~*Aep$+jUbhOBtvA}>@OEWJcg$zsv^WV^M0v*`5~v31 z7M1Q0CE+_WL11sAhAH3l*B{A+#$;RJIfR6HRL1Auohd5X%6R{3JQm8F)4D14nEBz5wDZqhhO)iq#A;Z%6|(QkX|^Pr%jxIp!R zMwxgEq#C-(3O;BgR!pVNdxU^>WL|c~oh8vkO|RTcjcJ%k?Vs4>D>x ztxe7e4Wv!4A7G!C;iz5RDSK3;#J{5NL3(HSs=rI1#9S$};ByD5la!;&?aNe?lWjZh zn&^CHx9a;quFL?#yY|&$5g2OM-q=NTYA!N6J0x!j=IuK^Xopj;_q3Y6_c~NjQ3o_{m5m(a?`JZ}^6r0TQ=hnP0=3 z)y)Xp?sYCu;f*(?D`vWoNHXWzr6qf9=e~Y8NS)^M>}k-wJMDOO0A*%9Hf3_X6|Xpg zDvf*2m$Mcr9sflGrfb=B+d2yj57PT>A2MbPktnZhjqg{}T?Nn0Lb5>p6&X$zU85;Q 
zqvhvyT9ao)>jh7ctlWhL+=Nv@0k<2fgex~~Slyxzce#!T&EP{L;?autu$q`)lTgk` zITFAw1)-8iG5V+QF`^BiZ$N5lgq;i3$sLTSy$#z7OB$-+53Kf(1YW0}J+cAm^Df6Y!c41(`-GdZfWXg}8* z10mjI0O$#c#|Te1I*L}rMOE8jbaXI01Jz}J!f!|>w@PYJC{U~FF(Hal{{=p9AlXU4 zv)f=arvNqwQ<4LPOK?t}0rW`nHM+7Jw5@gL9g@_g-sIAxxYUtbj5ndV}TUOIdw6_On0_V-#Eo(vrxfaok6o4h{4I|$m|eV+@l$OJ<%7;G*5 zm~#ID{{pISk?+Vg3@t;K7-cBvZ+#0B9cllzX|3-I?t6qpyu^R#xa<{P%JBWHZts2u z59gxzEfH+i8o}i`9E|dA#VBb%N%hwVD=UEki@|YGQ^FFX3#csT-~LpvC{Uorg_#5v ze=GQHe{?IuMWP@Q5l~>&+1-Ed^ha?8*gx$e&`NY`8Likm7)OR8MnmLKu_ zbY*im9j#aoezELs$LvlRj8&I+2X9+)8}FZ6#{U6q+gyt^suoa)xKTZZmGZQ7REE<9 z2BqbdH=~}W1dkY~>>=(ilc0o4PN!LR6Rr{fted!?6ZL@)l*RCBYHP$MeEd!un~@}K$#PuQ8eH+%s$FBKfnK|}n#ObGckZ~W zj3CdM?hr?bL4omS4(d;;;tavQ!)|J0m(pU(a8#A>m_Ps2l7wBO#~U1d{@vXkz)XW! zr}3?7Mm@XOI7X3TP){myC47^?HHEKWn}3iKgWT2Ms{qqz>K0ntpwCUNAx`vW2DM5i6hc1itMhCm3i@g z?VfYm z6sOPL?_NEKC&<5FY|@6vwDQb`u=9zPh1Q3_VXshN^=_4P)$q)DU_1<6`<< zFC#wNpE{NO#A_?^K*qB$8T^{P(e&GAo9FgLEV|%TED4?{FaOZ$vyi&Hge9`%=(^m^ zjV{n;8GnexQK7OtmtV9!776tFcvOux-`8`=lS>HfL;fxHv%AiI0#*(ctU8)#P{-&s zDxlNG&WnG+9!-f3zI6|rf$k}6UvNEO&F_yXcgh<6xg;YoxX^8inwWsYVM}J#?EDqIwv?e9%QxAfX={FE!Op8*ljTkut0-r_t~zT;V@!St0sI1_X_WMn(=pqjhWN!!HgIpJ;q__7h+STx(b^B8rsS%wmVZ1%e!x%CBM`= zl|?9$ZT#odd1%DtN}8&v89LXodvi2qECk=>z;m16mpTGTxZH*m0t$V)|v+sB@bI;;Qxo$fszbGlOV)y_G1YSJ(nU1M?DN9tlQ2KdBg3@X8`78eY95HrxH;RC7I*sFit97$0wP z9yK7^wAoQ#1(<1;-=7Q({AxPe4}Rd@W&DkmNGUkF`hi#^m}v!3o1wk=1QGwNn+G#m zIngW8eszp!xznZK*d6LaN91qUR{;a)xth!t1IEJoRzT^$Z0fI9@7dG`4S%z#OI)Xz zW}@?sd9qleWPdlCx3FdsM7`YbSlcm#JF~)Q> z#a%@pJ~96(P!O|yd#x{fsBZz({w^VC=m|shge|o1OxAIzdOZih9ipJL!po#xJ>Ats znnm6jLa#zM)zy*3M%&c=_@Mu{@Iks(18XF+FMtutX$rwI+0^%i+-677LJ5!ER?lie zMTX$h4em_rofDL<_dbgWh{U;l>$q^*1PSokYnwXa_T0)6!5ZQ&203;pilfw+7Mw0D zpqgy+;HK`n5*jZ7i$7dD3qhOz zS6(^f+*5qXqvX2!3#sC5>{rSPSnsk8?Sj@i*I^H+h6f_=g5Hx+U|s!ViQrNjfPvK< z!oNBcsLEQ`s)`m3RCQT1d$GEHy*oO@3oS10n>%O}O@ed3-npTC=A;x+36nk--A-{> zR$mL98|X(-O%RUX;C98rP$|&-(;WmGBUZ{dP@o%brd$q`w?Rz2{y6d#joqBsnQ|@b;4X`tw zaDqC=Dkfl=aA+iMJ*%KO(gF*M( zWz1(!gsDm|^mn&Ptd5=c^K=9pRrsNJEl#7_M~2%^i_5(FJhGvrCyl4kskD~4(m}|= zlbr-VpM7*rUGWvIm>c`q5^f!T@Jy#A$pma?j-OLg_ol?jBp@PcH1V=?2a9Q<3?$>` zB#_9U%}QPB82rs3F8z36dUaVitHryU%g20c?k?{Wc)Qnm8v%!P0UUOyp{&W?ptkH(z>2hmrc5$3OqR zNF9)J1{$n*8!0qUXW9N14j-%f%)_3T##VOL)kgS|WXQ^!LHgW?$CwHu+Awq7JBqVR zJ+pYu;acQy?^C&<20)9o7-G?A4RxL$$Y%N=W2(tOz*_1)l4mhw?CSz=wo0xSrUYg} zJ3;72$lS$f9N5?NHQk45M*={s0v=`8d@#-FY3)jD(6(4kRZ&WEU5DjivVpS-m|Xr| zx;d=d%Q7i7s5jTsq{kj|gO{^=SmFKA4w?IJ1BI}fJKy1SJ3<`KsGHKSHfks>mM3!CRs!s}eSyEem+ z_Q#9bzFza3dNzQ#YkrsC6qeb}JAR#^c)2G3y#ij)`~JO@;_}Vuxxt&fBT*??A3Y!f zQmK2rhBRym8Qoj-xnOBjQ+xn;N*mE}ISa&qGZu|NFZx79uO|LEPM;Cb^z=j;yes95 z6+jqwe|;Gim%g06JS+~XW38i|nPcPn-1F<_9H5is7x#a10elPQG47rEhV`VvnV46H z@m)bxtFE=7=Uw_TPI2IJfv0AHMM@ue0Y0bYZrxp z=m9?Ly)D-x^8_5}a&QHB{fMXNY`)dTcfU-;ex9YXl74xjI%dtCz%0sxrrnq@yt7fX z`90M0rApItZ(OsvW8b_pqm#mr)eYvzM)b!WXOaUXtH!PAYK6l!h!^N`k8n`Npp&!ky#0ZDrUJzR-%xQeu>hvl5~5bTw;LX->3dWO zJ!e#YouMtJJdYPTfv>mf0(vQdkYe?`(Rh*u6UxJ89$E3|j8Fb=&-*4C=)aJ;{oB^d zJy_Hm5n@~=I==Gf3<39}=Y5%UoX<=YXD@eTJ*}k;&b*H08hzfg-7IcD|3V4&9+K1d zeHJfvVDNJg>TpXR#^-)!u;-$Vp7|Qtyd;9z@q#|-(?N)YynM!1-FAu^BXgxBMOSS% z>-i8a{ajzwNR0}e&FGiIyE^GI{(WO4RYSciW&4%Ax?{ma)NyoHWzTD0z||WjO_XEBaIG zu*YA>l`q7lkMJAg_1jztBz$i5Ew8O(rD6#t`Aod2GkU&0?oEOLZ_n<>?drL?*3wlI zJ2jHLSHlg8EP$qhR_iei-QWE$-`kxK&{F5$QW740p~b|+UZt_%JJSv~^t;3C#tDJjO_u4~|@un+(>ccYXByB<<7nfrNzQ?fV@8kMpjwk7J974(;x3%H`8&%9d94%+jeONyc8tiYHPg_~L|&)>8B$WdTF;(-*Eq=gf2 z2H|00Cq4^69UTY9OMsQD&{dPoQ6PDISLQuHApOGI(0W$6F<~-C8z=1-R}Oup7DgJ? 
zyZ8;u{||Xz85P&Et&0RmAV7fN?!nzH2?Po5?iSn~x*?F@H13iF0>RzgrI7$Z8h7`` z8hy>)XP^7*{&;uXf3L>q!5YP?TD4|XP5oxATjcnBaUEP7c!7ZU?wIu9weaE>hV)k* z&Wnsz;9xSZ4VzvKx&khwT|?B8g_^qX&ablYdgkUhmu6>}!~Cb)FnA~Q(pTq_9?D8x1AVK`EAE0r?bQyf@|1EmdJ2;roG*s&Cn?-$N? zh}aLO6@xzgj8Lj5x{<7=rJUI zjGbl{uHE8V2oCp=GfMo|w=hASV zrygkqdVA^{kpA>o2)4(;A3jbSLSOY}c{ zKn#PhFau}t*R?g8t+9-5#(ygxt^q&5dhBo^T4r z3tHFR6xLZ=%_QYrA7odu0v&~bQ>{??GtjN$PeCa~n#bW7z4Ex%w zYbl%hOtpv#@e|5h*NGnfV&<5=Q`tZ5dptI;6R<4yv6oN%jCQbfJ?XAiLd-?tMULG7 z5H*<$_`F!;Of_GP52wRu;edTqtEu0te9;g0p!Ech#Pea>imx0H?t~qeA(WRS?xxY@ zQ03isYTaX6!ExS#-Hy%ah-{=jKN#L@wDCOMv`aQu65|?!yyG{DAS{Jqb)ZOABXm#k zaC@TC)Sn!$zw_h1!=;52@Me()F_*Y?l)P)al%;%}s%$%HLBx~FMt`Piu_)PzH+RNI zQmD<2TTV`n*KYiadAWaPI0);zoyCPP@-wm&&$nJN&13pNw5hqPfFG+Hrq4 zAH*BWFE$&WG(jYJ_}Ul%Pzf_IeBwl^F!y5^=eijC z5mL0uLx&&yYY7Md#}8+D$+G2|#ZD2`QRd7<0^1u-ZCzJ7fG&mm0TlNZ)NPfK)5SYEhP+$g(Ut{iltaUK_v`kc%?2dR z1q-(eN(^+Wy)F7N8unwv?Qf2=6!a294`qvWJJ>C!X3Dv3zL($mc@eVv?cuOfoM2u7 z6V$KdNsvK2W;?>yMn|N6Oc{>ZJ+4k{mPKwAc7Thf_HtF>x6c+s7e}f3trvXOLoGBE zfT_n9W#fJWZKu*sZ4*rq+a@qnBW}z4!4#0Wl-t++N-?cT1zBekQmvh zi9@a|SQtskkZj{F!K8E^%j!-19VCy$1J}kZ+TAZ3311Ns8q=1-Q%sn002;KVm7W|_ zh00RKj%n_w5aVPQo%XHN@sTjd4KZl(mR~-S>Slm6P4JVL*AjPHG(liSHm&iB^gdHr z0ByKFLss9KT_0C%14q3AiR?It9ZNzuyImZZ*t|5>vAC+>Na4oR`U9OQLbk)`f*Oy( zzrzXU8QbKwwH;tS2_1M%laMfwaVh#(J-QOV6fTITW-=HVMfxbF&lYA6L=}i`Oq8U> zUt+C=Ptj==x(ko_cfc77uUa@0UVKdFrsW}ap`;hCgQV?@Ls zB0r-@R5)+YpWEFclFr;x(_}r!_@>^^9w)je)Cwb5qhSq@_SdGA{TM)9SZwefRxvAx z-aJzlP}#E|bg38XN?qM=e1POIp{v!StQ-sk`M}x4uZwFv@OIC)ArAo43 zph!0PP+=BeW8@;M;nT2*K~A*kb&bSngTBiiIml)IOL60F>C~pz$bO;S=3}(Ee-CtF z6_B05aIXb6^lW};bFMF#S;evv7mZNXB7A4y?%VxNTQSS9Yj`cU*W58zW^$*w5uD z0aYEHuj8w4*G69MWw(TX3uwn6`m@yK-1B?y1W#~|FB&*DFcC^dLC*wS<@ zbVPh)K6j6&qbdZ^>C}$>uvLd^+rqLql;4mnrvjg48;?5WU8DAw>)<% zbGj%9ZMDO9o{F$QQfK!)W#cRR?0Z+_p9pPi!~Tz?qqknbiA4z>?7ec*j`|oO9{yy? 
zb`L%~H=a{=J>!Rf2U{&C`5%(;;oU-k=44QB845%Bq2|VWT z4rxPcPHlGIwjs+3K7Xf6;gkK^_gpLrW%*$!Ur{~nT5WTj^xC&8SVXz5U-z}cO&W*M zBmC#}YI)P$kw_$Y-_roR?lvUC0q8z?SVylh5433~G^&x9yajghPX^zb?ZcVVZ(_hm0KH9XO7G2@y8B017TqAqk z`pYUCQX(|s53$fTZnGw=@V*(dxH{p!pXq=QN0w|ZpTW*Z!k?7I$FM~V+0U3&_Sio) zgM~eG52T4%-G$Vl%GNMcv|_veM&UFjI`a%v+EryQH~ z=XoHj9~qhE0&ze8!CWaQnG>pHD=Bix2(k=zNSsMZj33p!`Z+VJ9-mT6p}py_@y!E8 zUD-IbkWA=+gk7YNwDLom{Zb2lXmAm|c2I!2WQFB3xg_IhL}R=RH-XiBu|Gw=9yGq^GiGav zRVCLq8?NkRw>Z73hG^}WpCwS9uC(_%@1~1*JLsx*n2>dJ7ul3UZ4Gm_Q8_ax#|Fjf zDtB5Pbb3&&X3Vbk>OAj;emKCjI%8&@0?lR8^&g9s_co1Z7=ZFiTGWclO0QP$j&}tZ zN9$X7YPRf-T$A@KsOiqx~>of6I?b7XNO7T0iyF@e>dF z!-D;px?b#F;Lr?rs?CVa)(AtvZ2p#SLf`4ORYC$^R)LdmQc`Vnnb(VKPi4b^RRzi} zOh#|-`%9K`McTqv8+ppR1)X)HCa0vlt+mJlyCh_ zvXp|U=Q+maHf_Pvj7B}-;~3r#_386%!52a07sLx5ZX#_y7Bmvu_*IAH_8&(q7O*-L ze%u!JCp7KZS(U((KmQ1NKG+-it|2(KJ*X~`XnhP4h!@l*z!3M2(%vF8J?56PtPHq; ziZ0(XIW`_D@QXHZS9Bb4tqH*%lm#Jmr+$$G59?lAij5$`g3=!qXP;TX8Y06(HxYi) zEgj|G-9NJRr;Lz~^@4_}9#aS#w;}RG9)qnd{sE#&q8nw#9orz$j{f3c;QM0e#$9iI zW{1WTYaY6EUr|kgZ+cr|I!(-~qMuVEYoIiq0TS-S%t5z7EK3vA6~o?*(_`oLoltM+ zXKv9^*@%>Y4!m-+XHp0zpttdtxby!UX3&}lz5CHLbj zsn22XdaC=#qQ9!$il(3Ob@Bsf2G9%=F$#$O{2tkDUsTKfB$Ss6?_9@@XVO~F;O0O- zFCC}nCK(D`H@1Ao#TVRU5>R1DF{zc+fr+7?&YDH zRT+3T7x9J82CGlXZF#za)O#K^GgjC7!*6>pen0TFco=qc+Z_4>e2$pEYWg?xv?o_3 z{ZL-X_L82QH{^lL-7uNUD6W;-jct{t>+->7o-wH_FTUzJ6|Wl&2u9m#3nET>TcY{4 zt)Om9N9b{akQm@$Vv7u$IUU6$M9YUS*02NqT)&&|ex4=QAWr9gMqJ%w8Neb)X-@KG zW~Ce*JSsiNf@x!;i}^Kp;0>W=Z?g+4!_oEYc!LiXmiTBbl=P`9{Ul{N@$2?yRmjV& zxW696IdgI4V!m`87%Q__2mRAL0U!s6ji#$86CmLRpXnFn&{4|a5?u+@0DZseKfE_r5;4l+4^(i6z>_a4 zn;Nzj$$^|;OC)$Q+>zHF^yJYB>Ks@a>pDB!pP_=w6Wk^kbo)23!$nG_|C!a4ih)7T zoevz(?vRIyM)#=DLH#6jXdY|Y`5y-vDW@d%D+>;*?zQ1oy_D2O&X0e)mJs278h8m& z48DpDyGs!E+#^Y)yKRanu>Z;D^dfNA*-W2bN&5|UEdkt$yNCNiAa*EzQke(y-1SNA zMe3k?eE3?QtFe&(tLHB1fBO3mugM{GTx%Cx(?nw!1dR5fig0SeZ%ro|iIvr1i;Tfi zh-WbIaaYjfG@Iw28)JX+DG@JDVo!;Ed_fC8b}eXf_08jGT_tM(-IZ6qjR_HIox?ix zwvrV?MFakV6R&LslS8r`=JAb#gsR%B4$pG_a@!!`zNkKHUIT!OZj`$JFnpsSsw_fM z9f^UzDz7HGEZu?U(wlm4FwO_MTd{crPG0pVYQgL8m{aWK@O})a!^gD+WW1;j zTpGnNOnw4Dd(K3a8f%V3MaWb42{rx5xS_jr%)%s-e>Ngz@sVFDWGW@-6kvd>-Y~KE zIl++RpY{bWlOL}w{4WgyzgB0M?5lhmTs%_vRA`ta|J6-ZKTYVbU!Q1b7J>$8)Ied+ z1yohX;g|5D*hldKj!+7 z89uf0Yn4W$QytfHq`@?iig$f0GGTVP2C3n!Rc)7t|9j#!A2St~qV*2x?1)qoxkPOB zsBmQUD9JzvkFwRS74IZ37vEFAG>QtjU;S|6Fm;M7>D&g0x5wMv;7Rb|bOj#Yu2w{R*4jGV zBOlOY0&bDEgU(6ak+Nv7`T?g^fFV17N+2#H#m@8L5;HpY?Zj%(H=kVb@69YV{14fJwzc4k?L7XBTnfPgMPtMw&nN4jy{8udwmRWi&729Yn)CFnkW)MH+ zBh;$0!B%M6y+3Y_COTwJnT%?-cFK5$y@mApd^HqiVcr2o#AI|SWRrbSQtsFNI1@0pbxSPHyj{9UGyU{2W%t=yHJ>EJRloy9S3fM~(TM z_A-?)2mUgm^>$rhfJ2$^9x4zP$ovP*qiT#ShDlkRXJ%sb{upXoHY?|EFcOB65rdPR zvNz+(^_(w4)@S*DhR`b6M9Pu5e8BL&h0LT@HkPcFu+L??h=8sfgI1Mj#GMH6D1pH! 
zLi{UvmhCK!raRwz2)4|7`&C7^E&JMUS$xXQ07LiF^1jC!!@n$k@U(BG^*{6<_wui7 zbtIIp=0ktKCXL8*BH#0$dfM(JPn7MRJbjuBZl))(jXsL54)2#z9cePIj^it;Ui_W>xbEm&@jhyvElAJYprz;PcxMYPmS$811G~?gqXUTAzoHp~lWU&K*$x`9@0l=b z;+A-Th_TgcNqdg6hbzv})aJ}VAVN%x`-#NxVtwh)o&c)tMGp>1rPM)WWBcBi{gD;w z!QqB&e(~kC{;92JU$bW8tla7IxiGx*+3!XLdQ)6DWX zlzlKYrMA;5&vQ$0EE&PxPvYffQA0V#@4S-oF=oZzIF^ZXfBy$>b=icC%BKo+TltEmJZdyFy+^;({fafn2Fv z3qMJRG>yj%^8^3N{OAvQVEil%K4G92@?>_-03k8O!V4QR=eIOUK<<%vX4L9w>8&w6 znQ%;R{_0NeX89l)CmWGV+)ePCIt> ze5d@E9Z`*3e`VNt}SbMxuoe-{kyzfibd`?9d^_H0dZ+%$iatNlB z0Y4Z_3rAipUB`_WIny)5#V$n4_mdfz6VOOl;~f<--+8xAai5xNl5WfgIs_8uNEs-h ziWFg1m6Fd7x_Z!ZO&}-}vsiI-_*gp41+*lEi0@jA!32Oeoj7VDp6GhoOo6Fp7_fh^ z0F=hjp2%A4kj5#jv(1WJs^kzsWNyUfm&&`N_U&V6(!H?|h1CiScI$!4tZ*hsXEIw_ zD#M0mU3p(6KcL-fTa9W=ZOQM32Q^C|wQ`#;v^|;oKGzH8ei~zF1$^IBce95cN1XCj z0e~vR*F+mCx(0>@*LGJ2(??bd-p7vi=!w}hJ8~ruFpfWlxX<-6PF5?TLIErgpA(8t z(ByF+amb z1Sv#KyL9-qn!3V3(XXDxsG4h}hvDMwRP@o{QPUm-?1bayxST3~lYE!0*3jS76(*(j z=sa!P(Tr`|m@+RNe6iQ3_u0+ocM8(7jH{0p+v-rO&fJ(3Y?XNWM|~GYCS;jGTkk)G zj?@{as5rZEDCN;+q_oSpXpyxiMXwF4Pkw4QwISt<7B#KERd5^YA*P=fgHw*z>^#t z8fCfQlT~Oy{+ju5&gbr6ccihPmPI0^80CIUBxd%5n~rLuV}E;g&Q{X+_F1RW({*%c2qGEn4^!`i_veJb>V#%3mwD`7lLI6Y$JXiP@b%l+gd=@*k$3rZx?eM*y za029vPI_4&)94iSI&!v+j}Ww5bRnnS9N>VtVSI%VdDkj*l~^Z*AgzI>Bm(7R&Os&r zC{i-gUNyYe-;sN!Q`dqpKQ}aiAD8K^gSo;5LSZrCr;`x+RGF;I2Cjy2jyuwjlGeeM>1@ zq+b$;tAX1Zx&_@Iy29rT;RpuhJjTY&L~nixG7=qq4$qG3f*!N8Er($S_8Plw#|JF= zYy`9fE({?SdntC$;iwtbo|LW@LfP3P z9=}%%bPOAG>`B~pVj*|T)df;=-*BA`ZWC#l;E<~KOn4bvC~2Z;R5#B|t5_CaFIQ8# zO+{awtaX@uEJY#!@FidtX!+N@w+RIzM*`V4&V!RHMdwlP?C(5K=nHBsPs<_Eb0rka z^&;`Lc3KHJIoe6dDJkGNzG9bV+MpHEd9j~+FVd^O~lzgT{7)ZblsCxZZqlpoh*qfSC}WIRxh z8Z2-Pdo8R}RxpOKx}u^OnNZQ)pr_ATnU(|X>|Ud(zBm<>cV&80d}t0+uB8=a1^=kv zl;DI1{^6k+f+58V-=x=sczA2BiN9yBBU}fu6qeZdmp&UHT_uF(e?OdUZDLgxhctVr zFXWd{MGkjtywd#%v(TL9HEv&QuKNmwd;0KFbw(tnVou&LE#*R$p48;q5NTzHYc@)X zF~{g%8SFp;JAx9E`3K>uWK^iP=ymDWQMbQzj9Zdci()km4PN$?$TRX88e9_6@)>!# za4Q-3@bYYI&tJ;@jE^@H&DN_Ah?FH4&E`gdNBe0e^5FPvIS&fu7vkk9zjk)STE-16 z9?f(K=zDwh@}E>`#U%<%*>C>JaEU3NKlv_C1Q%|?1r;V4Ow>S3%vY~&nb=r*&rkwt z(%?H*Hld;)IsRkvm~v(Lo4+Une|W^S=3gf1f7!(PZ?o0@zwQVZOBy|Vk>k6^JONRV z4?>Jm-eprr&7ahc%-%i&)d4wG>Vin|HpEzmZ2VVUQ{Kwfgq$M7=2>5i=5@yUf; z8Ub`gL!VjNP)gOzf3*_s{VM*v8&lv@oJ=QuG&dxCORZw5Uxd${yA}p2`o*(R_v8i` zps==%PpS;hqhMSqqY$IZk&EnUw0PJpOnx5o=N;U{;=RH03FveCpCuv0!>;B%o^usQNKm^;90{kvTOgeJsIj*1bH@6gm29X8nF}MB+(axai#Vj(9 zPRkyRTH4Qfh{rD8IY+fI0vzpYA5qD?{7tx#V??sSWO2*WU^8qWWCh6rGq4>21PBZrNt1 zYN7xA?_B@N?`IA2LcCS6@dG-d@=KOoZpQJdW|B_S&D>bYZ%+yyQhTLj_R<`qNS$)EK*!t_B{sh{PlRsKk;$Cc~mlYGSPo9g(ou4 zoQmO76n1CGDq1W-e-#3?p?IVRZs-PP^iWu@7(j_2m8~|Kx_{D?n$=}iZVvUf*hf2# zKPf^J9>MSWEl$8!)h9w&1@|^7ChR1mnUxy$}1E`W5Q!Veag$=C4MT9oMT?X^FY^UQBxkrPn3ZPc8mO^p{22b@Qjdspn~7 z6fwZUr)G0+=rZIBE9@OSxGOelBcSELET5pamM!R*j6dp|~+8FT+`vY?8Os6kJQ%0f|F~8f|T< zYV)imKjxlC=?k^H;|hRCdNpT6!Rf(hF17^`(}I&ib=?L>q+IdEWeu4wJQ;4AhlIa7 z-~ATnUb}UJY>r%Cj>avx&tDjbIii2aYEC>JfAN!&q^Rxhjk7$&p4B_;QCK6`pbx`k z@uJl8WHkXFgYBZMqyHp)+Pce*mggBNK)TC-T!LQ@&E-h_K#tn#k23>;^Opq@DYx~( z#`1t3eX!WRp3hdbgQRF-masWO3_oMM!3}0wT3bgv!p8PC^el4BP2;%jrQp#uNvA9T z05CZf<`blHaTiLbEhWA?&X9P6d}431XYDpn+@K;;@Nn)iwiV1Fpn|?K-!**IPJR6A zA+Nb}+y_CX+4>Y<34mC&`JT8srnR*F=*GmwZjQe#w;RC*i4JY=gO4Xfgt1qM`bG2E z*s1yHza$D`%Uf|UwrO+BX&8NRV&iEylvE|pGBRG2j`c)d(eKYBCS^kL*VnG*7f7&- zoK{EIffehvvW5T2i|EtQPkk7CS$BiboEkMIZmR0)6k}_SdPv*M3KCUD7rUX5o_|KT z^c0Alcv+`IeJLq9HL3OF8ih3{#aH_?yNiX=rGz(63ej0Ao9bVV;>#jPEASY60jlbW zePp`B0Hu8v>FNkm=+bP#RIc@J7EGuT?(1u2ruW)>IgG`KYbTvB+lZY#wXC8ub~+Ut zzxYDhL?GE-2(F}o4L^9+Vq92S$JsF^Z!`WYSb8%ympzE3Ei5;V8q;HeTcur{>-(q& 
z@JC?pfCRrwsrfe0ia9x_I}n$up!QK%dojhxsP4Vdw6B5Y{=%JifX9_ifJbw1O$IK4 zgyqzjlfI7Ty@?6LM7yqch?=E9PuGbc4R%V$KqP1u{3oe8c80__fSoxweyCkQBN!PMXoLkD(Tm>-DYe|RIr>A8NAz-@Pwq-a_HIK6nvEsqWc zfd|t^Ck0xEhSEFD8uxCO_En4*v#AapPpYSfHPQumO#ONXX(m3U#g2V7AZ_Q6HGJDy zRH)!@!i+~=S#u~5-z*I>1OEz5BQBvt(&gu8r&&NXK}H?tQYBilwcN9@Gkg#0eS*!# zy@CKM9bnvZ6!%WJCA)WR;*`H&JuD(nk-FRLZ->;zDiz-JfPTc9odEta4^YB7vfsSB zrw(Z1GIs2ZP6G+B=Ut6>2_@G0Jjr%pO6})iKFf0=XL74){SvkXOFXGkPsUe;e^&T} z9{Aj@Av2&(WRX`dh{m&oZd-DTE_JVGhBp|MCB9}(g}*^|u3roJUhql)h$=6(33K~+ z%e9fIBt;t7jA^}ei;kT$S=7Si`2ytwy@kJEcFLn*y1tWm&=$irj}=${Q&`kq8(_o} za}K6hi;HYRx%;)JWxVJa>nV_$HdGm`lpjsXxxOR*=~bxJsOgdw%O}dT(y^7J(-{|y z&o3U{*WD*67VdT(&PD@HoVGy2o@9QuUbqXDHA1#;2rm_X5gY%r#TaM3X99+>c*3ia za^ke-qpcrZ`4s}ftIEbEyfz5Q#MV5pcpv+ZtG6`{m~^|omK*hYoye1kow>!dZLre_ z0OU*^wu=&Sx~7)bqLt}@n^CB&S91aInAP_Kfh8Y*Zh^&9L8lyJsqmz+JLPs-bCX`@ z+2#m%=D}*;I6Q#?v+b%x{o->7`PO+VfDs>X{(drn@GFfC4VP368; zU#{_(Hwz+GXJ_k_)#KRL!RO=2J_`Eq6g>t1U>q2MOXU|<-$N$Aa9t28{L>m@>PE27 z+2$0hhZuJotoLEc+YakddzO24f6HO91N;)8*YDBeRyW-X+xDzw^}d)w#>!m1h@=Z9 z76#x~;-t^rDbX|f_o!9TPPdQOhle1SK05P6tJMqriVB+fM>eo=Q6}4|I;Zt>C<}IJ zUqyhhAzMX8qj$@w~){g{S7lO@LW3BltcVsVrnm9|JjhR^=^}+u@{=;(n z-kbd!`oCMIJb?~}1fkh^J*Po)p3yI;yiRXU(;|~m3n_@Hn>J1%PLg$HeQmXUWWcEN zGw9x=AuKAS3fF0*in{(Oq$YJ~Cyjneev=R{{@F^TuhRK)0%)NydqNz~42W>6Lc%Mq zWCD*B-j|7~WaoVFG;vkoZ|kz(D;8?F*~6MX`dU%>5jw!UY@{A}vtRYm6{5OLkq0N| z;m?{;w|6Ihi%Y^Y zj7L@mXfcozK3skTr^vXS(l1sxkj~d3S3g-Owm*sETT?&dKvK?kRHJMHO#@Adf*?;r zshxN^C=Nd^OGd?~Th}Yw(y7Mc^gY7M?jF;?Q>kucnqm} z58hh36c19FUc=V#c|#}H{Ax?7~~4g}VD zRQoUVRWsZ#k(<|);T#y!-rJkhE>X@+l*R{@;jVYUe1pGWgd3aML*`9~eB+*L8I@8} zi;)JKWOls^+9Vm170rw+7tNGlXzv26E0`r>V|sSK=7b zEW*;uB}$Q^BB{SEpPytTy0UT>tl9L(+^dZogKsLS;cln@1!x;3Q*SU389eQ_&6WW9 zgxIhvbcW(slf5=45Z_=qVZ+*HMW$YT7~y8zXQ~Q&-Lzf_1cFZS@?%wT^Nbd1M9g{3 zIriQ|V$9am6_Z;Zs2*Xa7yucx(VLm)!ovaC+xboq`Y;GyW@*g!+<3Q8ogtl2ahs`C z3}SW`O!__iQ=wk?*7*f%XPZ?wp#reudFa4-Fl5iaPhuX_D6l*#lL6KzoU-OkC}Up@ zw|fAWTZ)XSrl&sjjmu*z7p&_yGmf2bxG5BOv+imJ(+<%W`n2pfA=cQUX&Tln_dl`f7@4(i+=(SerOZG>KA3_^D-6z92y~otg zXy5HKeiheV`uNx~7)9YJPH1$`%OV`J(Zn-vVwUi@i2PFg(EwHA6pw!Y5u}fr1%_?s zfWNG_nKa`cT)+_(V~OVx4eUM{pZ?C!;h-np@`lLC-N6gp%rI_OizeLE!$xHjIS zVmD`DSNl45sg8g_*t>U6YaKVD)#Pr&8d@XM@Ljt z%{j`PVv-=`RYv^#kC~zL)hT$(#e@#b`p5d>71{(kj(%YfC`ddL{e|pE_DYvQ@(X=|5 z+a7IL$S0duP%G-VthG=bKu3yhcwry6Q;jILM)W#IKR8}I*OK@!>~n@IEyj~kN`V1f za^2zp)7Dz*!O%v}*4dzI7kVbQC&@lcEYP*mxmmn}YpO$R&wGT~M60_YClr2_p_^sy zH#XCmR;YnF+~tZIdT33a_L31Ik(9z%)-2qQJaKJKT+o+>LK2$pZlf>V} zBu;uBBiSBN=RX0qPCi(~t4%cSwYmFG#SDP;K*hGlC6wjmH0J*F69ijmx;V2`6WQunXjCG}-K?wjCh&IV z*VNmDzfpz~jI73KPL=h4g2Srl{P z6`X_^Xp7~5v`T7mKDNa5eJ9|_K6c;jjB?9;k8%sFFHDAgPwVlF=r5G#j=-L)uVW@0Au#fSFR~j=l5LIMWbw;jZ*H86ZVl^^-V7^M z++Z@YhlY0_cJy>)O-G8O^&X#@R*TVluj!97a3R&te*vqi(!fb%2YfSQOj(UdD9V*4 z!(p$S_#&0vpzKVBeg60L-{hga?D3QS{Nnayc$P#)O15BNMmaF?(dk&JJNFH=6$C79 z5_yG0{ro*$P({>pvsguHs@$$lH!>{H%jZ~;RPqSANY7))#Bzfpf_vy!0;$5@Ye@A5 z|5|_3g+NXB>rFA;t7nM=xHNLJ@kT}2gqU)8alDaVIorpI&s)7TH=B-tiyqn>d&UD? 
z%9cfquCTok({lmhi{Fzi)6HS)8oeGn#x3y;`v;hN66WwPf%9!F&7|Z; zPG0XCL=t2x5HK?EY<(~AzR!NIl=}U(qu#(dHlV}R?E!$D2eq-X?i=>W`i?>{RYbA9 zk1|$$!$|tx>^k7Mjn}a9OYO3y;e)YOJycGYGjt1YJ+0Q8)h8{d%E}2ah#nh|S)-jz z?|c3{pI$;LHK{)v6P#La1?*ssQ+yz_gWyO6)d?M+^kskRj^nsk(=n~~;fg}!XIUv)Rg zg}VlGMnIUEnszFSF7dgrFDX82)~|2@n3}CHGBM%?!mb>zHo(a5=VgWH{lKAIPUqqL za2s6-mPh%nZrXvRN3p4{?#RhH2gtLx^ZJe`9sAJLQ*Yn}Wj0U7OFEm6UNn)$=AP*y zS;Ue#jC?B>Z8R!?nIibky7PVbeMr%EW7}=1Pb{RR@>at=03H2o^dm|I)fdoW3xus=|eE<4k?m;#&!zRNaS62s$4f#7;%jIH4m#828&t zb=Ly^<~T5cGZ0APP1bqwZ`$K0u}P!m_z#w_@gG}g^B?qN;yYGp9rFb@h8^-uU|h%TCy_im1uZx?+TR#Muoz#eWAOB09kPxL98 z+0;E=ZAi{5TP&TZ)5TA+!N3iM{IVTp& zbS(Yx0=f}f$&jX7UPe8?UVaaTp{~NUJk$521_TRiv`t>W3+yIgdc}0dA{EZ_sTCSlP_W*-8Gp=zi&n0ASo3bav|r=Z`p&My*BFD*NE(hg!2P2Shy(KC}>rWM;%k(OSLI>ZmaK1R-*;rsteU_JM=Bj2WN}<{l2XP4OAk~eqP2VlKrEt zeA*Gvx;qKPJ>FcB+ScEHrcH?uJQT;EZ|oZqvfAx^6WR2VY#~jL;Vsc&sKPh(2Q!At zW4Ys1j;h>KPHB%wtm{x0wfZ>#fv9jYjwI!3&Uc`h2555G_{oC(oDWm~U@=Y2Fl5!= zq=EJN=8bS{-yR#v>*WlA{mGJWkYX`^>^jQ64g-ri_Jm;F3u=UE>gfUtoXPLx9tu>v z$uXsovdWOoU6d#)Z%9^C#A3hyH9EKK{S9Z_Med!vqm`Ir0|kG%&Y%~HBf49g>E;bG z@29!hWuPuB85=Yo+;1Lq*t>qY;^}vCJt~DKzCe-jdoH@+y9+O$q^qvjK9`Y!%PW>& zHwgS+bTc4;VN`TTwOFKj?Vbu4L9p`%o<(BTCyYg)dwo`4X=*1vW27yzLfHq-E*Eg^ zLG#A0bm+&S+0S}-cv|(iydycxbZ`}>5-A(&4Aw%A_(I8`?73qOFNoioBF^>e+LBdH zwu(NPoWA_leO7p+L7&9lqBIz9KP`Qi&GEZEzj%)3LV+bDbt`SR2`^=m!TI6%LYGL1 zG3MJD*o3g+Xo~K~?PJh8ykpubrDtRc=Hust*A`AE)c9J12rZ=6oX$OI%2+faFSj}Y zwPfe$Ty$}6 z*{6yE8BUhp&~XKQ$fT_ zAy}ohZYQ_)EeO-t78dU!*}ZVpM?U;aXWPLg99)W8YzQ8d7-1Lwpz3Y-gO(Wwcdctv zP|TMn-6%W~rhoi3Bt3AqYm@s#m!5@xyVRJVI~;7f_T0t9?3G$G-(kj=lpAZ8i8n`B zB-(B(&z(N@Pb2=b?D69x21XezJIT_OqpYXbDmN?vL+P!shkHDPOP2{FpX8l!+SKH` zSEgDNe*CQFOX>IYmKB4KW^5vBjKBG}$(Lr`Ojns(xp;L3h1xn!mQ9Gn{Q z5z5Cx;a@iAM7rWO09Yji(JAgKS?pqxAGuq-49NZyID_%1&z-xIlqyw?jdpszm+;f= zku7Pa_Q|P&-(UF|3I2s2b(L5zttKtm4eOGuNZFm{U+z_KZW=aVSr90-@m?3gnl;<( z{v=St3__^9U{8^0_$A<9&bz#Ye=zf||D0?w^iP%mlK{Hx2Kv^@T(Z~Xdd#xw#Sg&f zkCy$E%^AlhjhxG}G38EtUx3N*ThE7D4tN7s57oD7@BYiO6XoqE_;I+T zfQX*BYRCb*&2~y=`J=qk@`vVdr_**Xtxg1g-AE$nZ(P8M0E*S?R$S}f40b~Oq)YD= zES@bz{T!CP3CvFdD+jB7e%YOiF@zR(V^Fk0R+!G;!A= z+mGiLo2Q0zxJKExFPfEuHlZf4j_p&^6pa@u(znurhL25QE)I9J=g_z$(XXM&{-9pi z7>D2|VGAsfjMEC96P}}uO3L2RVM|J4PpEDI(%fq}FJJ<;pI1Z}3`Xs_lL*HrOR$^R zKek2rFa?Kw<@>0Vk0X71EwiK75F&?s%WZP5;Mvc>`F-9aHPC}I+GBZ&u`KJ6M`Lvh ztMtuWY*)P{Dn=sYk-i=j%8_t0UTuwiL+{X`E=-ERIP>LjGwG2S9JUIiuhjxddqxT` zF2nZbU;B%H5r>OPMy@1vza^`?RZ5UZ`$qj;O4JCJw5-c)pwwGWZY&$P@z9@AU2Y;^>?j;1kuUrRyWsgnW>ak1AFiO^I>L4TqqVPqigH`~C#0mrp;Ji#X{AFE z>5vXd>2Aqk2oVqj0qIm)kQ$^@T4D(4Zt0;JYQ7oH`OZD}e>nHvZ~ZT8u~=)?%X(+O z@7~Yz{9-?wlU~fc6T;HDrlti?m=wR##6;#mpPk2;(DJp-_tbl+!9Eie4gSLJ3<`mt z4@RnduSBCTD#;L6O>MUu63Zfko|86S*xQXKL%xlT5S-C%Tx_2o z%s3FvCRgzZf1){G6>VMYl>}Mnu92and@t&=ldwP!-kL~nFVJ((&*?irPUYZ1hLsIIyh5GttW2S5UQ;1o+=R^K{yv)gY;k^9D^NUj?zC^h!Zf+?O#8Kr3`!crE#fBh+IKhaoF7v?+9sDVV~YJSxxG&O@+;%LQa|HPPO`7HZj5X{ z+wg3gXO?n#YF;+P037JdpZ%zk^7y4NHtsu`x5fOcz+U&VHRoGYz`EOYl6ut4?HpW< z?JNYj9q6a)bhQ0hoj7=mN%(V2Q_)iIAZu3RLfZ;bG&4hS5#aK!Z=fI*;7;f`~I5%*m)a%9r@hBsgL`Z}WMV8NPX-X7Sj z>gIroi_SzYO!scYd%T-@(6bBD2r8ij58C|?b+#n7<+$=dP-#?d>FJChvuJuR-7^~U zQHK~n2iOcWd{C948=_V9=(BXzrkwsXT+x>L-1jzmY-1bXCk(NDjHrM-VzrC|(om@7 zr@pX2WVQ=Ww^>+dx1OrBL(AOeK=kK1f6<7~g^cF(Bq6jyNO(jPeRDcO&P=I)m$Qm*6F5X(ekF z?DRIa{bECos>a6p3k&j2^ed}2wV$pW+jUQrX;TFgr>HbmtyV3I#B83Z$~CHr#m{Cf zu9Nz3a5~b8Xmgmob`cH%8jo&wA0|~r^E=#C0CS*7{xj-YGm{=-$!Hv72mf;{qn<(J}$x zctVusI(yE&C{C8nzz=5zUQQSHgdgWiwG_t{iFmVDk@cGW+EVg#9&qcTrj43yW|?K4 z9?Ik#nVIt9Vhtk%E2FMSh_BIQz|WBu&aRbD$bu_hIS+BtEo=#`<9N3xF@ZPKN+)aC zjMAhc3iPHpSRwIIDHpml#LUAE4Nz+kf}?{qtaD-jT%3eSJR@<~8F=>@pVjxJ7Pnfj 
z+>Fh+F7%gB`=Oi!n=QEvMYmFTwnwLd`0Z&(CW505VNq&c#h+aN7z$DmF=_kWZ`rPn?3O8vXp3OS|bFira8`f(RE(TurL3y+HSe zv2g`h*3?sP?x1Ym*;FqNT_1=PX1+tI&^h`%J8!F5eOgYpehcQ!Ami(Qa%bu__5E_g zkD%_pqE7s=)>RUASaGwGNxwjmKFA+aiR%2kGi1sXpjSsE@F+6ZLYA}8QmiAKo!}L! zj<~1;A?=@%&q2wm@#u2xB!80irqBzkh5iZIjZt5aAC7p+>QV_dSl`T#a&a3w7#Ea#7~o-1t=N#t+0Z1m<-Foonc; zfkUE`sgc}@mJfF9kJqt(F3L{o@$T*_>M!$NsqXUYrRzX`DmpsjnuE*GFpa19bG?`w z-4|UZQ^V*`>cw$e*V|D46^v6&DvCk%6zDge4;`kXHmA;*v-xe13#i?zN^?%bN*uo= zjPE!$mO;M<-L3o33u^pP|2~hU>3$!@@eq7~<23~zB-=y{mOQC(up0Anp0$yE`^()H z_c5)d@sqmKy;He&q5&-dax1pQOhr)a8W!{pv+Df2w1HLg#Qn*t`ld7%a1~4bm#K9b z^MVC>b^Ip5Le!kl)1Ux8AdN`Cd2ipvfQ`lqBSxMts;Lmph>bdUU(O|p8X@6`UgOF5 z5NZO;W}9fTk~J2s2JQ%MPqut%adcEZ`_(ehHv!qgQDdx1817&O- ztVsmrSd~B9SHUf$(d~VoCGf}2qVat}WymT6g~wvvI9c4W4hj`{=s_1Gjgdp|w&n8uW~9 z!9C92hp%Z1jnevZH!=6Ve-=m-E8{p0Fhzab^I~)l$K|&)FJ*om^keQA_#Zn1`g)@i0WgLWrqGbn`OV%_hZsmD81v&tcaXWMQn)? zFm!#k>0s?cQ!KY*9SDtP=%XLuNzO)IEJcMKS8)Gk_0p#TQBhCAEK9!@92852jTMQG zpc6}%@43Bx{pU)OudHS|S|;;kpdb;IC*NL2r0_)0+BPm6?JAIoV_5=SOz$TqWpE0b$1|2cXxpP1bZ%(o=MDxRzh zGVFVZl3L+eD%K9qTecX|XH@tAk<1_trknN3PH|=xPQZY8muGs}STc?`!_I~6T*N@ViYuqy> zDzvdZL}BT=yAo^70Lyk0M#0P-4jX>tG-@RmGHyey>$U9Gyux7mRP<7Q1l#Ru39x(} z6_#TRG@dLv2_ge%Ge&VF%CTId-)2F~CH?)QQxGVr<5a?8ziZYuUzIQZl;stq7Lb-v zhc(6}I1yLOtZEd0SkQ9e?{h8Ohql$H{_(f;R*hxmAo@-=tgZdQk#dO-#n;saLt zO;ye|>kp&oxSZ&&ojxC{a0z5NS;>XSJopRNCAc#+pXsOgEn4j~dGai1@&?+CCY%5; z9y8R8qGCbp7)uO7^wKi~y@$lXk5?Wx*5mH3;aP40Bi8lXY-4GS-6OGjH&2%;Dj%Vi z+S%!A+!g|lOl8XJJm?sy#GU+MvEDYDqYir6__dd2I;|#W7zstQC2Qt=W(2D2g3Wak zh$G0~hFK>k5|Aw%Ijp4j;|m_n7%H3Pl+Jo!ow(aR$wy5b38gtDR)_yQJluVDk?J_6 zrxxz8)VR}hFosj0u-53IYTrs(dF#T(>)zb+L-k%1(avs>1q{XSDRX_>O(K_ev0N#1 z1OoHNa69Fr4_~C_CKG0zM<67C>-{;{WMz{3M;#@n-b>m!u|bw^hL1|%GTP0RzQiPE zVAE~=+@=$ncq&pAP^>DUSf6b}xS+D8e#lijdsy9tqoptzq7!spQ_DD_)plt@3caPd zvI8)r4TB2SZojw_fp}VvcH=#COD}P*E;i62X#}_lbHS!1gk=7Xggt8@sqhd4an{p0 z$f2Mc?-z=&4Fx*TKPkY&;FXDA+NqhG2V`P3%pX!)AQEEc~hRAr>FB|B}WR1l~2!C6i=YI zC@t69PmjrJ%75u0s{812YW4^B*lNWwFf4Z)8|O)46!r#X=n|BLdq8lHs$tNCXB1p& zN4Lhj%$bau!!>xxj%=9FKe+B*$g`YSNU0ccI$IjDmn=Pf&SfsDoZRmXJ6woTCZDqr zV2xGQr_g#_M1CPVT7Ka*#?@0Y(}*moOH@$*tp0F%84$C41of))6J^N4HyRR4DZjTD2C@IS9M^h zc9+fqYgAT>tV@iLK&OkZRz46R%^|Adg0Sj!8s!;l$37sC+3Z7m& zEL-9vy?d-#K!(&}kU?8D{}s_0tV^D@^_fCr=ih*W&GdUks*o+1K8`c@Mi_lNwF^u>U4@8LW+<7`huSbx>%UX`T=6X%u5&OXOl z*!_m}1Sqd=pEO^i&rb|}C(JkorOL)bY@F7Ry+bm03c+f=Z-F+=tqF3MNJ{N8hJT(l z4c|X@^ZW7s4|s%m8?-4#uTX-aE{)>M1W89wcL!0^@Kcv#lC9Mq zrI5>#Pn=6<(7KOHNOXQzREdj?9!$Wa$}cjS5oQIv-=g2$*)ModoL=U++wJY)9FS-+ZSKkz3wsj)tII$B-bkB){Bk9Of|L+ZUlZH>y zYNOlXchKEtlD|RO3rmbaEb38m#b#zObqB(#ILLC51GH z63eiuleM}T+WNhE=W0r<3nAq`ooeSD+NI2iNQh}40v2-nDkyV^bvROCRIb)*B+*&r^rDC5) z(W?wLA-~{1u*Zs~!8|Z-ifns>Xf;>3@i=jl({WciE`?ms>&^%q#t70)0YDkj%nj)B zF28gl+4?pWRJmX?(+NzA=8KU9CrriM22U}GvW;Mq-iZx%R^sL#N-RuEsI?X03r||L zQe{Cg25X$&$~P)i?4q7~-uP@-y$)#vfT_(DwAyMLiV+9XuJPB@ehz5r{HrDlnJ}n! 
zqJE8Ax59jA_Q9!~+w=)zM25kFsW4~xuS=Gqi=b?eD@ z6m%9+q`ps}h$m@Nl?wzzGcNhA_K}fB(t6U7mr(zJf~)sD(A4rrI)xGUSO_vrI7+cI zl0>Je{0J0!ltB(DYB%lfx%l|l=RkqByDTd9_y<@jingUqCe{1)y5Hy-Lfa^4eN@1H_^4Gx{XCz$^3AK*S_f+ZH|nrCZ2 zzBILhit1lV%=blfGuBsrWFtUV(WAf2EzT|NJ!7pGZ^BrT?q5j@5nbNVYfs(^1V`eh zuAOcKk+W;5z&|oz{9KMd|>u-SQ!9JImpajMoEVLxt zp-EAUFL6Uw3!xVxHT#bFJwpFI(P|n`GR2L>M3GKUM(cZKD8rPv#ZE-kM5-gYvGlZJ z#$fCEC|feO3IT;}Ow!Q?Wx1F!OjJi4>g9W?knp!DI*rXh2RRZp?yny-q@>-2@oD6G z=*YM$K}H?*AD>FKQZlg&V`l*`7~DAYzh?&kDY(6v>r4TO1KYxi&BBQtwYwXDv3@wk z7bn*cEn<7wg(==3qUO-~6LieGMX{ASAo zZu8&qb%gJfOhzp#>A(S!$99lKjjzH#dF2zXo20x`jIz^|#mq&hF`~Q62VUs3AeTj# z=T7I@UKK(#CW-worl+=siSpZ{E54b{&LvfMnO|hh+E<8{r57a4E|T{5{OdHa2g|@j zZ{7rRrz%r3$(e)YDWMLjEzLxzUdZazMf1fIwfhq~l>ro`dhrO#>_M|sI9aQqhZJ1EQj?hJ3*I13@FsOcF zw$MS}p%r-FJ+l#)>U92qO0AKp_L)&_$0}Id+ymMYNTF2H)_(ETO9!9rp3Xk6 zgn8erFZ>E0}XZLxw%pp!h38Q!%VQEOGka)sPQE0@x-n!*~)gqq!ebHkn5$eec!gDv) zsgqMChr|F;gON*T`HRJ)D0{CRfs!ANY@(upTbFa{9HRAt_EYgnC>0GWS_r0-jy8&{ z_0&CqsFxXCE_o=eYlupGdcBwYOtb*Cgp4#`zfKE*GP4&2|KxxhyB&rwh`AEs0LkK% z5iI}_vc^_L2=b0nQvamhUw`uN_IYg<$m*rYM=c+rESO#*MHy`kR)(H9L8V?o-i2duc3Iuzpnf}QM@9u=*?3inMz>q zG=(ErXs%<>jvU~BO=GJn^5~7`l2&j?sHdMn&?|wn(e@QE6@F!~7#lmCUD9rPmZ=5|`mk2v?ar}Jn z)%{(kaWz5d2COqNW8$N9O}OWxBfPEtyvb^zob}ZAHqiSxQ{@#~-f?~3sgKV9LTC4u zI3^OhI>mPSt3iz1pcFaBwJG{`Zz{htJ-3_x51Kbt`l53G%}Rp{b9(7aa5V>A@N%|NBFkYyb~ukNj9B&fUBg!4g89zI z_}^mT)V@`vk}Q)lO!Rgguf3s9oF+i;Z;V|F^;>A84arh&a0za!MWEDZn17JV}V2!FtX&v;LV5LbJz-x zs9CcR&)C!4W9!c0aT25<_s17hMDM8;imBcmQu2zxCsnU7q@>hNqrMK1@b>FUO>)VS&VDt?gpzA6FcDVl;%P*ewa~_xy;3_ z$<_K+DxW|CndToB&-bw&(Fw&Z`)N9jJGk&^^9oq=88)ldUT{Hg5|<@Goae{di?f!b zL;OGF`a9ZtPmSly^?=B55X~NpusE#*PydWJ`j<2f5wvKur1^h!VjA7*;Uu0zFEw;S zu8TtMfA%=7+}1F>_5f>8ciz4o`mR{z>C;)=b-nx18D+0I=|Am$9=h(9NuQ90cDH6< z=g%Ie;EDo+>xlM$4}^c{##9ZR$c{i#Y{b8}2vu`%d)5%>@?Q&iLP@HJ@~KZZ({^L$ z9z=FT6M1?xLKL_-8J4o~|K9F%p(J%dI z&5*zP2JagNHKo``F+xXa>5T7;?m_B#f3f67rgv7QH@6_I7hQ3RNMxd#ru_oMGtbZ~3~+yriGVmR+78z$J#@3D;gXag!BFN9M;dqyTEbq2k`Y&#xHx!%m<@GOGr&nI~1D<&g zb1j&cC%?DwJ$V2Eo^Z~l)exV|WK~x=Gud4E2?kO6b{A=ny8gsux5Uq|D|)(CEN~ai zSR>D1P3xk<8-wEZb>)qGX(Z9QA&C!CXg1Y^Ui`}X^@I{}s)I4v+SrXG#jV+!3PNl@ z@m+Js7?sc>dVR=vQ5L`@qpcUrnbos>#mN#aMUpw4KlHB;2+g zgRl-@kh{J&_?fdKSVboDMt;FN z{N)2gkmN?0s^sZY+xx0{m-Y{lR9Rn{%IL}A{BJhdMPj-Mc8OQ#z}R&sCnB%_9pS0p zi;7Et*6QVZ_|}Wq-NG5UESikH>HERRCR6kz@6lpzcXyQID`qjaFF2{~4~CB&016Gs zJ5s+k#z><6TjkOpW%;7svjH;>eUU7$we8rOl{Qc{E@hXl>pcbCeo2=P`J_9I3eY># z45O_o3Id1G%BRTfFSZQaE zf$C&*8BcZL%HqD3T`-Q#`}VVdh=*2#E6T6F)Z}Y`F$@;zCsN9&;H0EM(3fv{_;;{K z8$~m;SBq9^pPIsUf5jXDO1pH=aZP|jkGs;@`%O7??I(OJjpjXuToZ7;mg7u;rQh;fn-%Sp8ey$I>c7~o z!4u(c`OOdGe;umxfw%mB+psYLcwW-Rm<$vQ&C^bp`|(D%PDCB4fe;)=x>_xY|R9s>>Gnl_TMVhgq1et4#82aBl3ir;p5NFsXjQCtf}~1d+gg?6vU6*b7iSwN#nr( E0UQjieE Date: Mon, 25 Mar 2024 16:01:21 -0400 Subject: [PATCH 0946/1112] adding api for new mdc processing state db table --- .../iq/dataverse/api/MakeDataCountApi.java | 54 ++++++++++++++- .../MakeDataCountProcessState.java | 67 +++++++++++++++++++ .../MakeDataCountProcessStateServiceBean.java | 61 +++++++++++++++++ src/main/resources/db/migration/V6.1.0.8.sql | 10 +++ .../iq/dataverse/api/MakeDataCountApiIT.java | 40 +++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 13 ++++ 6 files changed, 244 insertions(+), 1 deletion(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessStateServiceBean.java create mode 100644 src/main/resources/db/migration/V6.1.0.8.sql diff --git 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
index 08e776a3eb8..38023327274 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
@@ -7,6 +7,8 @@
 import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitationsServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics;
 import edu.harvard.iq.dataverse.makedatacount.DatasetMetricsServiceBean;
+import edu.harvard.iq.dataverse.makedatacount.MakeDataCountProcessState;
+import edu.harvard.iq.dataverse.makedatacount.MakeDataCountProcessStateServiceBean;
 import edu.harvard.iq.dataverse.pidproviders.PidProvider;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.pidproviders.doi.datacite.DataCiteDOIProvider;
@@ -29,6 +31,8 @@
 import jakarta.json.JsonObject;
 import jakarta.json.JsonObjectBuilder;
 import jakarta.json.JsonValue;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
 import jakarta.ws.rs.POST;
 import jakarta.ws.rs.Path;
 import jakarta.ws.rs.PathParam;
@@ -47,6 +51,8 @@ public class MakeDataCountApi extends AbstractApiBean {
     @EJB
     DatasetMetricsServiceBean datasetMetricsService;
     @EJB
+    MakeDataCountProcessStateServiceBean makeDataCountProcessStateService;
+    @EJB
     DatasetExternalCitationsServiceBean datasetExternalCitationsService;
     @EJB
     DatasetServiceBean datasetService;
@@ -110,7 +116,7 @@ public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @Quer
 
     @POST
     @Path("/addUsageMetricsFromSushiReport")
-    public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @QueryParam("reportOnDisk") String reportOnDisk) {
+    public Response addUsageMetricsFromSushiReportAll(@QueryParam("reportOnDisk") String reportOnDisk) {
 
         try {
             JsonObject report = JsonUtil.getJsonObjectFromFile(reportOnDisk);
@@ -200,5 +206,51 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE
             return wr.getResponse();
         }
     }
+    @GET
+    @Path("{yearMonth}/processingState")
+    public Response getProcessingState(@PathParam("yearMonth") String yearMonth) {
+        MakeDataCountProcessState mdcps;
+        try {
+            mdcps = makeDataCountProcessStateService.getMakeDataCountProcessState(yearMonth);
+        } catch (IllegalArgumentException e) {
+            return error(Status.BAD_REQUEST,e.getMessage());
+        }
+        if (mdcps != null) {
+            JsonObjectBuilder output = Json.createObjectBuilder();
+            output.add("yearMonth", mdcps.getYearMonth());
+            output.add("state", mdcps.getState().name());
+            output.add("state-change-timestamp", mdcps.getStateChangeTime().toString());
+            return ok(output);
+        } else {
+            return error(Status.NOT_FOUND, "Could not find an existing process state for " + yearMonth);
+        }
+    }
+    @POST
+    @Path("{yearMonth}/processingState")
+    public Response updateProcessingState(@PathParam("yearMonth") String yearMonth, @QueryParam("state") String state) {
+        MakeDataCountProcessState mdcps;
+        try {
+            mdcps = makeDataCountProcessStateService.setMakeDataCountProcessState(yearMonth, state);
+        } catch (IllegalArgumentException e) {
+            return error(Status.BAD_REQUEST,e.getMessage());
+        }
+
+        JsonObjectBuilder output = Json.createObjectBuilder();
+        output.add("yearMonth", mdcps.getYearMonth());
+        output.add("state", mdcps.getState().name());
+        output.add("state-change-timestamp", mdcps.getStateChangeTime().toString());
+        return ok(output);
+    }
+
+    @DELETE
+    @Path("{yearMonth}/processingState")
+    public Response deleteProcessingState(@PathParam("yearMonth") String yearMonth) {
+        boolean deleted = makeDataCountProcessStateService.deleteMakeDataCountProcessState(yearMonth);
+        if (deleted) {
+            return ok("Processing State deleted for " + yearMonth);
+        } else {
+            return notFound("Processing State not found for " + yearMonth);
+        }
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java
new file mode 100644
index 00000000000..f49640214e9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java
@@ -0,0 +1,67 @@
+package edu.harvard.iq.dataverse.makedatacount;
+
+import jakarta.persistence.*;
+
+import java.io.Serializable;
+import java.sql.Timestamp;
+import java.time.Instant;
+
+@Entity
+public class MakeDataCountProcessState implements Serializable {
+    @Id
+    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    @Column(nullable = false)
+    private Long id;
+
+    public enum MDCProcessState {
+        NEW("new"), DONE("done"), SKIP("skip"), PROCESSING("processing"), FAILED("failed");
+        private final String text;
+        private MDCProcessState(final String text) {
+            this.text = text;
+        }
+        @Override
+        public String toString() {
+            return text;
+        }
+    }
+    @Column(nullable = false)
+    private String yearMonth;
+    @Column(nullable = false)
+    private MDCProcessState state;
+    @Column(nullable = true)
+    private Timestamp state_change_time;
+
+    public MakeDataCountProcessState() { }
+    public MakeDataCountProcessState (String yearMonth, String state) {
+        this.setYearMonth(yearMonth);
+        this.setState(state);
+    }
+
+    public void setYearMonth(String yearMonth) throws IllegalArgumentException {
+        // Todo: add constraint
+        if (yearMonth == null || (!yearMonth.matches("\\d{4}-\\d{2}") && !yearMonth.matches("\\d{4}-\\d{2}-\\d{2}"))) {
+            throw new IllegalArgumentException("YEAR-MONTH date format must be either yyyy-mm or yyyy-mm-dd");
+        }
+        this.yearMonth = yearMonth;
+    }
+    public String getYearMonth() {
+        return this.yearMonth;
+    }
+    public void setState(MDCProcessState state) {
+        this.state = state;
+        this.state_change_time = Timestamp.from(Instant.now());
+    }
+    public void setState(String state) throws IllegalArgumentException {
+        if (state != null) {
+            setState(MDCProcessState.valueOf(state.toUpperCase()));
+        } else {
+            throw new IllegalArgumentException("State is required and can not be null");
+        }
+    }
+    public MDCProcessState getState() {
+        return this.state;
+    }
+    public Timestamp getStateChangeTime() {
+        return state_change_time;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessStateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessStateServiceBean.java
new file mode 100644
index 00000000000..5d7ec8ff047
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessStateServiceBean.java
@@ -0,0 +1,61 @@
+package edu.harvard.iq.dataverse.makedatacount;
+
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+
+import java.util.List;
+
+@Named
+@Stateless
+public class MakeDataCountProcessStateServiceBean {
+
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
+    protected EntityManager em;
+
+    public MakeDataCountProcessState getMakeDataCountProcessState(String yearMonth) {
+        validateYearMonth(yearMonth);
+        MakeDataCountProcessState mdcps = null;
+        String queryStr = "SELECT d FROM MakeDataCountProcessState d WHERE d.yearMonth = '" + yearMonth + "' ";
+        Query query = em.createQuery(queryStr);
+        List resultList = query.getResultList();
+        if (resultList.size() > 1) {
+            throw new EJBException("More than one MakeDataCount Process State record found for YearMonth " + yearMonth + ".");
+        }
+        if (resultList.size() == 1) {
+            mdcps = (MakeDataCountProcessState) resultList.get(0);
+        }
+        return mdcps;
+    }
+
+    public MakeDataCountProcessState setMakeDataCountProcessState(String yearMonth, String state) {
+        MakeDataCountProcessState mdcps = getMakeDataCountProcessState(yearMonth);
+        if (mdcps == null) {
+            mdcps = new MakeDataCountProcessState(yearMonth, state);
+        } else {
+            mdcps.setState(state);
+        }
+        return em.merge(mdcps);
+    }
+
+    public boolean deleteMakeDataCountProcessState(String yearMonth) {
+        MakeDataCountProcessState mdcps = getMakeDataCountProcessState(yearMonth);
+        if (mdcps == null) {
+            return false;
+        } else {
+            em.remove(mdcps);
+            em.flush();
+            return true;
+        }
+    }
+
+    private void validateYearMonth(String yearMonth) {
+        // Check yearMonth format. either yyyy-mm or yyyy-mm-dd
+        if (yearMonth == null || (!yearMonth.matches("\\d{4}-\\d{2}") && !yearMonth.matches("\\d{4}-\\d{2}-\\d{2}"))) {
+            throw new IllegalArgumentException("YEAR-MONTH date format must be either yyyy-mm or yyyy-mm-dd");
+        }
+    }
+}
diff --git a/src/main/resources/db/migration/V6.1.0.8.sql b/src/main/resources/db/migration/V6.1.0.8.sql
new file mode 100644
index 00000000000..b8f466c0b73
--- /dev/null
+++ b/src/main/resources/db/migration/V6.1.0.8.sql
@@ -0,0 +1,10 @@
+CREATE TABLE IF NOT EXISTS makedatacountprocessstate (
+    id SERIAL NOT NULL,
+    yearMonth VARCHAR(16) NOT NULL UNIQUE,
+    state ENUM('new', 'done', 'skip', 'processing', 'failed') NOT NULL,
+    state_change_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(),
+    PRIMARY KEY (ID)
+    );
+
+CREATE INDEX IF NOT EXISTS INDEX_makedatacountprocessstate_yearMonth ON makedatacountprocessstate (yearMonth);
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
index 7a113fd4caa..dbfd853edd1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
@@ -1,5 +1,7 @@
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.makedatacount.MakeDataCountProcessState;
+import io.restassured.path.json.JsonPath;
 import io.restassured.RestAssured;
 import io.restassured.response.Response;
 import java.io.File;
@@ -7,8 +9,13 @@
 import static jakarta.ws.rs.core.Response.Status.CREATED;
 import static jakarta.ws.rs.core.Response.Status.OK;
 import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
 import org.apache.commons.io.FileUtils;
+import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import org.hamcrest.Matchers;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
@@ -171,6 +178,39 @@ public void testMakeDataCountGetMetric() throws IOException {
     }
 
+    @Test
+    public void testGetUpdateDeleteProcessingState() {
+        String yearMonth = "2000-01";
+        // make sure it isn't in the DB
+        Response deleteState = UtilIT.makeDataCountDeleteProcessingState(yearMonth);
+        deleteState.then().assertThat().statusCode(anyOf(equalTo(200), equalTo(404)));
+
+        Response getState = UtilIT.makeDataCountGetProcessingState(yearMonth);
+        getState.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+        Response updateState = UtilIT.makeDataCountUpdateProcessingState(yearMonth, MakeDataCountProcessState.MDCProcessState.PROCESSING.toString());
+        updateState.then().assertThat().statusCode(OK.getStatusCode());
+        getState = UtilIT.makeDataCountGetProcessingState(yearMonth);
+        getState.then().assertThat().statusCode(OK.getStatusCode());
+        JsonPath stateJson = JsonPath.from(getState.body().asString());
+        stateJson.prettyPrint();
+        String state1 = stateJson.getString("data.state");
+        assertThat(state1, Matchers.equalTo(MakeDataCountProcessState.MDCProcessState.PROCESSING.name()));
+        String updateTimestamp1 = stateJson.getString("data.state-change-timestamp");
+
+        updateState = UtilIT.makeDataCountUpdateProcessingState(yearMonth, MakeDataCountProcessState.MDCProcessState.DONE.toString());
+        updateState.then().assertThat().statusCode(OK.getStatusCode());
+        stateJson = JsonPath.from(updateState.body().asString());
+        stateJson.prettyPrint();
+        String state2 = stateJson.getString("data.state");
+        String updateTimestamp2 = stateJson.getString("data.state-change-timestamp");
+        assertThat(state2, Matchers.equalTo(MakeDataCountProcessState.MDCProcessState.DONE.name()));
+
+        assertThat(updateTimestamp2, Matchers.is(Matchers.greaterThan(updateTimestamp1)));
+
+        deleteState = UtilIT.makeDataCountDeleteProcessingState(yearMonth);
+        deleteState.then().assertThat().statusCode(OK.getStatusCode());
+    }
+
     /**
      * Ignore is set on this test because it requires database edits to pass.
      * There are currently two citions for doi:10.7910/DVN/HQZOOB but you have
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 080ca0c43e9..ba36911ffae 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3135,6 +3135,19 @@ static Response makeDataCountUpdateCitationsForDataset(String idOrPersistentIdOf
         return requestSpecification.post("/api/admin/makeDataCount/" + idInPath + "/updateCitationsForDataset"+ optionalQueryParam);
     }
 
+    static Response makeDataCountGetProcessingState(String yearMonth) {
+        RequestSpecification requestSpecification = given();
+        return requestSpecification.get("/api/admin/makeDataCount/" + yearMonth + "/processingState");
+    }
+    static Response makeDataCountUpdateProcessingState(String yearMonth, String state) {
+        RequestSpecification requestSpecification = given();
+        return requestSpecification.post("/api/admin/makeDataCount/" + yearMonth + "/processingState?state=" + state);
+    }
+    static Response makeDataCountDeleteProcessingState(String yearMonth) {
+        RequestSpecification requestSpecification = given();
+        return requestSpecification.delete("/api/admin/makeDataCount/" + yearMonth + "/processingState");
+    }
+
     static Response editDDI(String body, String fileId, String apiToken) {
         if (apiToken == null) {
             apiToken = "";
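
The new processing-state endpoints added in the patch above can be exercised with plain curl. A minimal sketch against a local installation (the hostname and the 2024-03 value are placeholders, the admin API is assumed to be reachable on localhost, and the valid states per the enum are new, done, skip, processing, and failed):

    # mark a month as being processed
    curl -X POST "http://localhost:8080/api/admin/makeDataCount/2024-03/processingState?state=processing"
    # read back the current state and its change timestamp
    curl "http://localhost:8080/api/admin/makeDataCount/2024-03/processingState"
    # mark it done, or delete the record entirely
    curl -X POST "http://localhost:8080/api/admin/makeDataCount/2024-03/processingState?state=done"
    curl -X DELETE "http://localhost:8080/api/admin/makeDataCount/2024-03/processingState"
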
From f263a4e698e63fbcc5dc5cf017cbaa4cdf44deb9 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 25 Mar 2024 18:00:11 -0400
Subject: [PATCH 0947/1112] update docs and release note #7424

---
 doc/release-notes/7424-mailsession.md            | 11 ++++++-----
 doc/sphinx-guides/source/installation/config.rst |  1 +
 .../source/installation/installation-main.rst    |  4 +++-
 3 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md
index 43846b0b72d..faaf618bc17 100644
--- a/doc/release-notes/7424-mailsession.md
+++ b/doc/release-notes/7424-mailsession.md
@@ -1,12 +1,13 @@
-## New way to configure mail transfer agent
+## Simplified SMTP configuration
 
-With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure your MTA.
-Instead, we provide the ability to configure your SMTP mail host using JVM options only, with the flexibility of MicroProfile Config.
+With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure Dataverse to send mail using your SMTP server and provide a simplified, standard alternative using JVM options or MicroProfile Config.
 
 At this point, no action is required if you want to keep your current configuration.
 
 Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration.
 
-For more details on how to configure the connection to your mail provider, please find updated details within the Installation Guide's main installation and configuration section.
+Please do take the opportunity to update your SMTP configuration. Details can be found in the [dataverse.mail.mta.*](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta) section of the Installation Guide.
 
-Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration.
\ No newline at end of file
+Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replaced with `dataverse.mail.system-email` as described below.
+
+Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration.
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 889cee537d0..30d0567c557 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -3025,6 +3025,7 @@ Detailed description for every setting can be found in the table included within
 ``dataverse.mail.mta.noop.strict``,
 ``dataverse.mail.mta.mailextension``
 
+See also :ref:`mail-host-config-auth`.
 
 dataverse.ui.allow-review-for-incomplete
 ++++++++++++++++++++++++++++++++++++++++
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 9f935db6510..c20b848e1f5 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -141,6 +141,8 @@ Got ERR_ADDRESS_UNREACHABLE While Navigating on Interface or API Calls
 
 If you are receiving an ``ERR_ADDRESS_UNREACHABLE`` while navigating the GUI or making an API call, make sure the ``siteUrl`` JVM option is defined. For details on how to set ``siteUrl``, please refer to :ref:`dataverse.siteUrl` from the :doc:`config` section. For context on why setting this option is necessary, refer to :ref:`dataverse.fqdn` from the :doc:`config` section.
 
+.. _mail-host-config-auth:
+
 Mail Host Configuration & Authentication
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -149,7 +151,7 @@ If you need to alter your mail host address, user, or provide a password to conn
 To enable authentication with your mail server, simply configure the following options:
 
 - ``dataverse.mail.mta.auth = true``
-- ``dataverse.mail.mta.username = ``
+- ``dataverse.mail.mta.user = ``
 - ``dataverse.mail.mta.password``
 
 **WARNING**:
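
Condensed into commands, the clean-up that the release note in the patch above describes looks roughly like this (a sketch assuming a default Payara layout and an unblocked admin API on localhost; adjust host names, paths, and the address for your installation, and note that the create-system-properties call is just one way to set the new JVM option, since MicroProfile Config environment variables work as well):

    # drop the deprecated JavaMail resource
    asadmin delete-javamail-resource mail/notifyMailSession
    # remove the retired :SystemEmail database setting
    curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail
    # provide the "From" address through the new setting instead
    asadmin create-system-properties "dataverse.mail.system-email=support@example.org"
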
dataverse.ui.allow-review-for-incomplete ++++++++++++++++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 9f935db6510..c20b848e1f5 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -141,6 +141,8 @@ Got ERR_ADDRESS_UNREACHABLE While Navigating on Interface or API Calls If you are receiving an ``ERR_ADDRESS_UNREACHABLE`` while navigating the GUI or making an API call, make sure the ``siteUrl`` JVM option is defined. For details on how to set ``siteUrl``, please refer to :ref:`dataverse.siteUrl` from the :doc:`config` section. For context on why setting this option is necessary, refer to :ref:`dataverse.fqdn` from the :doc:`config` section. +.. _mail-host-config-auth: + Mail Host Configuration & Authentication ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -149,7 +151,7 @@ If you need to alter your mail host address, user, or provide a password to conn To enable authentication with your mail server, simply configure the following options: - ``dataverse.mail.mta.auth = true`` -- ``dataverse.mail.mta.username = `` +- ``dataverse.mail.mta.user = `` - ``dataverse.mail.mta.password`` **WARNING**: From caf56823905a0e68ddfda2b855abc9e39a6af59e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 25 Mar 2024 18:14:37 -0400 Subject: [PATCH 0948/1112] link higher up in the guides #7424 --- doc/release-notes/7424-mailsession.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index faaf618bc17..37fede1bb1f 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -6,7 +6,7 @@ At this point, no action is required if you want to keep your current configurat Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. -Please do take the opportunity to update your SMTP configuration. Details can be found in the [dataverse.mail.mta.*](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta) section of the Installation Guide. +Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [dataverse.mail.system.email](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-system-email) section of the Installation Guide. Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. From 362b87e1e7dd079a28c159246daa211525fc0bb3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 13:57:15 +0100 Subject: [PATCH 0949/1112] fix(mail): remove duplicate JvmSettings.MAIL_MTA_HOST The setting is already covered by the "host" property string in MailSessionProducer. 
--- .../edu/harvard/iq/dataverse/settings/JvmSettings.java | 1 - .../java/edu/harvard/iq/dataverse/MailServiceBeanIT.java | 2 +- .../harvard/iq/dataverse/util/MailSessionProducerIT.java | 8 ++++---- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index e71cabceffe..524df1e1ce9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -195,7 +195,6 @@ public enum JvmSettings { MAIL_DEBUG(SCOPE_MAIL, "debug"), // Mail Transfer Agent settings SCOPE_MAIL_MTA(SCOPE_MAIL, "mta"), - MAIL_MTA_HOST(SCOPE_MAIL_MTA, "host"), MAIL_MTA_AUTH(SCOPE_MAIL_MTA, "auth"), MAIL_MTA_USER(SCOPE_MAIL_MTA, "user"), MAIL_MTA_PASSWORD(SCOPE_MAIL_MTA, "password"), diff --git a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanIT.java b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanIT.java index 08eed9fe295..17dede5e9f3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanIT.java @@ -42,7 +42,7 @@ @Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) @LocalJvmSettings -@JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") +@JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpHost", varArgs = "host") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") class MailServiceBeanIT { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java index c4893652153..29b6598b1a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/MailSessionProducerIT.java @@ -71,7 +71,7 @@ static void tearDown() { @Nested @LocalJvmSettings - @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpHost", varArgs = "host") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") class WithoutAuthentication { @Container @@ -121,7 +121,7 @@ void createSession() { @Nested @LocalJvmSettings - @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpHost", varArgs = "host") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, varArgs = "ssl.enable", value = "true") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, varArgs = "ssl.trust", value = "*") @@ -183,7 +183,7 @@ void createSession() { @Nested @LocalJvmSettings - @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, method = "tcSmtpHost") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpHost", varArgs = "host") @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, method = "tcSmtpPort", varArgs = "port") @JvmSetting(key = JvmSettings.MAIL_MTA_AUTH, value = "yes") @JvmSetting(key = JvmSettings.MAIL_MTA_USER, value = username) @@ -252,7 +252,7 @@ void invalidConfigItemsAreIgnoredOnSessionBuild() { } @Test - @JvmSetting(key = JvmSettings.MAIL_MTA_HOST, value = "foobar") + @JvmSetting(key = JvmSettings.MAIL_MTA_SETTING, value = "foobar", varArgs = "host") void invalidHostnameIsFailingWhenSending() { assertDoesNotThrow(() -> new 
MailSessionProducer().getSession()); From b8ca4a70788943a05188945709a084156eb8f7bb Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 14:05:51 +0100 Subject: [PATCH 0950/1112] fix(mail): do not add a default for SMPT host in ct profile As Payara 6.2023.7 still suffers from the MPCONFIG bug where a profiled setting is not easy to override, lets just remove the default for the container profile and make people add it even for containers. --- doc/sphinx-guides/source/installation/config.rst | 3 +-- src/main/resources/META-INF/microprofile-config.properties | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 30d0567c557..6d061ece384 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2946,8 +2946,7 @@ The following table describes the most important settings commonly used. - Default Value * - ``dataverse.mail.mta.host`` - The SMTP server to connect to. - - | *No default* - | (``smtp`` in our :ref:`Dataverse container `) + - *No default* * - ``dataverse.mail.mta.port`` - The SMTP server port to connect to. - ``25`` diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 9924d2518ca..517a4e9513b 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -45,8 +45,6 @@ dataverse.rserve.tempdir=/tmp/Rserv # MAIL dataverse.mail.mta.auth=false dataverse.mail.mta.allow-utf8-addresses=true -# In containers, default to hostname smtp, a container on the same network -%ct.dataverse.mail.mta.host=smtp # OAI SERVER dataverse.oai.server.maxidentifiers=100 From d8198b53c3c92af2e91fe6e1df65af791b356b77 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 14:40:11 +0100 Subject: [PATCH 0951/1112] style(mail): enable more debug output from session producer In case people want to debug Jakarta Mail, they activate dataverse.mail.debug. Let's hook into that and add more verbose output from the session producer, too. That way people can make sure everything is set up as they wish. --- .../dataverse/util/MailSessionProducer.java | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java index 13fedb94014..149f92761d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java @@ -16,6 +16,7 @@ import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Collectors; @ApplicationScoped public class MailSessionProducer { @@ -42,6 +43,12 @@ public class MailSessionProducer { private static final String PREFIX = "mail.smtp."; private static final Logger logger = Logger.getLogger(MailSessionProducer.class.getCanonicalName()); + static { + if (Boolean.TRUE.equals(JvmSettings.MAIL_DEBUG.lookup(Boolean.class))) { + logger.setLevel(Level.FINE); + } + } + Session systemMailSession; /** @@ -60,7 +67,7 @@ public MailSessionProducer() { } catch (NamingException e) { // This exception simply means the appserver did not provide the legacy mail session. // Debug level output is just fine. 
- logger.log(Level.FINE, "Error during mail resource lookup", e); + logger.log(Level.FINER, "Error during legacy appserver-level mail resource lookup", e); } } @@ -75,14 +82,21 @@ public Session getSession() { } if (systemMailSession == null) { + logger.fine("Setting up new mail session"); + // Initialize with null (= no authenticator) is a valid argument for the session factory method. Authenticator authenticator = null; // In case we want auth, create an authenticator (default = false from microprofile-config.properties) - if (JvmSettings.MAIL_MTA_AUTH.lookup(Boolean.class)) { + if (Boolean.TRUE.equals(JvmSettings.MAIL_MTA_AUTH.lookup(Boolean.class))) { + logger.fine("Mail Authentication is enabled, building authenticator"); authenticator = new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { + logger.fine(() -> + String.format("Returning PasswordAuthenticator with username='%s', password='%s'", + JvmSettings.MAIL_MTA_USER.lookup(), + "*".repeat(JvmSettings.MAIL_MTA_PASSWORD.lookup().length()))); return new PasswordAuthentication(JvmSettings.MAIL_MTA_USER.lookup(), JvmSettings.MAIL_MTA_PASSWORD.lookup()); } }; @@ -116,6 +130,10 @@ Properties getMailProperties() { prop -> JvmSettings.MAIL_MTA_SETTING.lookupOptional(Integer.class, prop).ifPresent( number -> configuration.put(PREFIX + prop, number.toString()))); + logger.fine(() -> "Compiled properties:" + configuration.entrySet().stream() + .map(entry -> "\"" + entry.getKey() + "\": \"" + entry.getValue() + "\"") + .collect(Collectors.joining(",\n"))); + return configuration; } From 2a73426d87c755f97ca9cc9af6c80f2a3f2347be Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 14:58:51 +0100 Subject: [PATCH 0952/1112] fix(mail): do not fail to deploy when debugging is not configured --- .../java/edu/harvard/iq/dataverse/util/MailSessionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java index 149f92761d2..202772201de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java @@ -44,7 +44,7 @@ public class MailSessionProducer { private static final Logger logger = Logger.getLogger(MailSessionProducer.class.getCanonicalName()); static { - if (Boolean.TRUE.equals(JvmSettings.MAIL_DEBUG.lookup(Boolean.class))) { + if (Boolean.TRUE.equals(JvmSettings.MAIL_DEBUG.lookupOptional(Boolean.class).orElse(false))) { logger.setLevel(Level.FINE); } } From 21aa73d31ef8cd96b8b32f3e8de140352c120ef3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 15:00:34 +0100 Subject: [PATCH 0953/1112] style(mail): applying better fix for default value of mail debugging --- .../java/edu/harvard/iq/dataverse/util/MailSessionProducer.java | 2 +- src/main/resources/META-INF/microprofile-config.properties | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java index 202772201de..149f92761d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailSessionProducer.java @@ -44,7 +44,7 @@ public class MailSessionProducer { private static final Logger logger = 
Logger.getLogger(MailSessionProducer.class.getCanonicalName()); static { - if (Boolean.TRUE.equals(JvmSettings.MAIL_DEBUG.lookupOptional(Boolean.class).orElse(false))) { + if (Boolean.TRUE.equals(JvmSettings.MAIL_DEBUG.lookup(Boolean.class))) { logger.setLevel(Level.FINE); } } diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 517a4e9513b..b0bc92cf975 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -43,6 +43,7 @@ dataverse.rserve.password=rserve dataverse.rserve.tempdir=/tmp/Rserv # MAIL +dataverse.mail.debug=false dataverse.mail.mta.auth=false dataverse.mail.mta.allow-utf8-addresses=true From a33168df198c3bb977ec8d8e051ee845a11c413c Mon Sep 17 00:00:00 2001 From: Jose Lucas Cordeiro Date: Tue, 26 Mar 2024 11:12:34 -0300 Subject: [PATCH 0954/1112] #10411: Update Explicit Groups Documentation Adding more details in the request for the explicit groups documentation. Issue listed here: https://github.com/IQSS/dataverse/issues/10411 --- doc/sphinx-guides/source/api/native-api.rst | 82 +++++++++++++++++++-- 1 file changed, 74 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 144e3ac8e5e..def894aec6d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4168,33 +4168,99 @@ Data being POSTed is json-formatted description of the group:: "aliasInOwner":"ccs" } +A curl example: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/groups" --data '{"description":"Describe the group here","displayName":"Close Collaborators", "aliasInOwner":"ccs"}' + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/24/groups" --data '{"description":"Describe the group here","displayName":"Close Collaborators", "aliasInOwner":"ccs"}' + List Explicit Groups in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -List explicit groups under Dataverse collection ``$id``:: +List explicit groups under Dataverse collection ``ID``. A curl example using an ``ID``: - GET http://$server/api/dataverses/$id/groups +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/groups" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/24/groups" Show Single Group in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Show group ``$groupAlias`` under dataverse ``$dv``:: +Show group ``$GROUP_ALIAS`` under dataverse ``$DATAVERSE_ID`` and a ``$GROUP_ALIAS``: - GET http://$server/api/dataverses/$dv/groups/$groupAlias +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export GROUP_ALIAS=ccs + export DATAVERSE_ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$DATAVERSE_ID/groups/$GROUP_ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/24/groups/ccs" Update Group in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Show group ``$GROUP_ALIAS`` under dataverse ``$DATAVERSE_ID`` and a ``$GROUP_ALIAS``. The request body is the same as the create group one, except that the group alias cannot be changed. Thus, the field ``aliasInOwner`` is ignored.: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export GROUP_ALIAS=ccs + export DATAVERSE_ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$DATAVERSE_ID/groups/$GROUP_ALIAS" --data '{"description":"Describe the group here","displayName":"Close Collaborators"}' -Update group ``$groupAlias`` under Dataverse collection ``$dv``. The request body is the same as the create group one, except that the group alias cannot be changed. Thus, the field ``aliasInOwner`` is ignored. :: +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - PUT http://$server/api/dataverses/$dv/groups/$groupAlias + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/24/groups/ccs" --data '{"description":"Describe the group here","displayName":"Close Collaborators"}' Delete Group from a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Delete group ``$groupAlias`` under Dataverse collection ``$dv``:: +Delete group ``$GROUP_ALIAS`` under Dataverse collection ``$DATAVERSE_ID``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export GROUP_ALIAS=ccs + export DATAVERSE_ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$DATAVERSE_ID/groups/$GROUP_ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - DELETE http://$server/api/dataverses/$dv/groups/$groupAlias + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/24/groups/ccs" Add Multiple Role Assignees to an Explicit Group ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 8898d5367b34215c0991f300b072d2fe6fd4de91 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 26 Mar 2024 10:25:04 -0400 Subject: [PATCH 0955/1112] adding release note --- doc/release-notes/10424-new-api-for-mdc.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 doc/release-notes/10424-new-api-for-mdc.md diff --git a/doc/release-notes/10424-new-api-for-mdc.md b/doc/release-notes/10424-new-api-for-mdc.md new file mode 100644 index 00000000000..8fb1f6d9e3d --- /dev/null +++ b/doc/release-notes/10424-new-api-for-mdc.md @@ -0,0 +1,11 @@ +The API endpoint `api/admin/makeDataCount/{yearMonth}/processingState` has been added to Get, Create/Update(POST), and Delete a State for processing Make Data Count logged metrics +For Create/Update the 'state' is passed in through a query parameter. 
+Example +- `curl POST http://localhost:8080/api/admin/makeDataCount/2024-03/processingState?state=Skip` + +Valid values for state are [New, Done, Skip, Processing, and Failed] +'New' can be used to re-trigger the processing of the data for the year-month specified. +'Skip' will prevent the file from being processed. +'Processing' shows the state where the file is currently being processed. +'Failed' shows the state where the file has failed and will be re-processed in the next run. If you don't want the file to be re-processed set the state to 'Skip'. +'Done' is the state where the file has been successfully processed. From 36193714c14de8b13e33cfe0dfeabf8c730e1fe2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Mar 2024 10:37:25 -0400 Subject: [PATCH 0956/1112] fix dot to dash --- doc/release-notes/7424-mailsession.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index 37fede1bb1f..470b78cf2de 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -6,7 +6,7 @@ At this point, no action is required if you want to keep your current configurat Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. -Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [dataverse.mail.system.email](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-system-email) section of the Installation Guide. +Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [dataverse.mail.system-email](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-system-email) section of the Installation Guide. Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. From c498cebb31783f242f025d829bfdb11a2c46e79a Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 16:07:10 +0100 Subject: [PATCH 0957/1112] doc(mail): add ssl.enable setting to shortlist Also add notes about common ports in use. --- doc/sphinx-guides/source/installation/config.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 6d061ece384..25afbcc8fff 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2948,8 +2948,11 @@ The following table describes the most important settings commonly used. - The SMTP server to connect to. - *No default* * - ``dataverse.mail.mta.port`` - - The SMTP server port to connect to. + - The SMTP server port to connect to. (Common are ``25`` for plain, ``587`` for SSL, ``465`` for legacy SSL) - ``25`` + * - ``dataverse.mail.mta.ssl.enable`` + - Enable if your mail provider uses SSL. 
+ - ``false`` * - ``dataverse.mail.mta.auth`` - If ``true``, attempt to authenticate the user using the AUTH command. - ``false`` @@ -2981,7 +2984,6 @@ Detailed description for every setting can be found in the table included within * SSL/TLS: ``dataverse.mail.mta.starttls.enable``, ``dataverse.mail.mta.starttls.required``, - ``dataverse.mail.mta.ssl.enable``, ``dataverse.mail.mta.ssl.checkserveridentity``, ``dataverse.mail.mta.ssl.trust``, ``dataverse.mail.mta.ssl.protocols``, From 3e9d992a9abdeae80a796a96ff93246e2817119f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 16:11:24 +0100 Subject: [PATCH 0958/1112] doc(mail): add newly added settings to release note --- doc/release-notes/7424-mailsession.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index 470b78cf2de..f67dbd6efc5 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -11,3 +11,14 @@ Please do take the opportunity to update your SMTP configuration. Details can be Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. + +List of options added: +- dataverse.mail.system-email +- dataverse.mail.mta.host +- dataverse.mail.mta.port +- dataverse.mail.mta.ssl.enable +- dataverse.mail.mta.auth +- dataverse.mail.mta.user +- dataverse.mail.mta.password +- dataverse.mail.mta.allow-utf8-addresses +- Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). 
\ No newline at end of file From 785dfc5251d8544ba2681eb4a1d82856baebe1e3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 16:21:09 +0100 Subject: [PATCH 0959/1112] chore(build): update Maven and test framework dependencies --- modules/dataverse-parent/pom.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index a15575e6e50..db8b3186efc 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -168,11 +168,11 @@ 5.2.0 - 1.19.1 - 3.4.1 - 5.10.0 - 5.6.0 - 0.8.10 + 1.19.7 + 3.7.1 + 5.10.2 + 5.11.0 + 0.8.11 9.3 @@ -182,8 +182,8 @@ 3.3.2 3.5.0 3.1.1 - 3.1.0 - 3.1.0 + 3.2.5 + 3.2.5 3.6.0 3.3.1 3.0.0-M7 @@ -199,7 +199,7 @@ 1.7.0 - 0.43.4 + 0.44.0 From 243bafed1363c2edd67d8a3dcf75b6ae76b29bfd Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 26 Mar 2024 11:35:49 -0400 Subject: [PATCH 0960/1112] adding test for invalid state --- .../iq/dataverse/api/MakeDataCountApi.java | 4 ++-- .../MakeDataCountProcessState.java | 17 ++++++++++----- .../iq/dataverse/api/MakeDataCountApiIT.java | 21 +++++++++++++++++++ 3 files changed, 35 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 38023327274..d94ab42c516 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -232,8 +232,8 @@ public Response updateProcessingState(@PathParam("yearMonth") String yearMonth, MakeDataCountProcessState mdcps; try { mdcps = makeDataCountProcessStateService.setMakeDataCountProcessState(yearMonth, state); - } catch (IllegalArgumentException e) { - return error(Status.BAD_REQUEST,e.getMessage()); + } catch (Exception e) { + return badRequest(e.getMessage()); } JsonObjectBuilder output = Json.createObjectBuilder(); diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java index f49640214e9..bde705abf44 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java @@ -5,6 +5,7 @@ import java.io.Serializable; import java.sql.Timestamp; import java.time.Instant; +import java.util.Arrays; @Entity public class MakeDataCountProcessState implements Serializable { @@ -19,6 +20,16 @@ public enum MDCProcessState { private MDCProcessState(final String text) { this.text = text; } + public static MDCProcessState fromString(String text) { + if (text != null) { + for (MDCProcessState state : MDCProcessState.values()) { + if (text.equals(state.text)) { + return state; + } + } + } + throw new IllegalArgumentException("State must be one of these values: " + Arrays.asList(MDCProcessState.values()) + "."); + } @Override public String toString() { return text; @@ -52,11 +63,7 @@ public void setState(MDCProcessState state) { this.state_change_time = Timestamp.from(Instant.now()); } public void setState(String state) throws IllegalArgumentException { - if (state != null) { - setState(MDCProcessState.valueOf(state.toUpperCase())); - } else { - throw new IllegalArgumentException("State is required and can not be null"); - } + setState(MDCProcessState.fromString(state)); } public MDCProcessState getState() { return this.state; diff 
--git a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java index dbfd853edd1..64856461703 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java @@ -211,6 +211,27 @@ public void testGetUpdateDeleteProcessingState() { deleteState.then().assertThat().statusCode(OK.getStatusCode()); } + @Test + public void testUpdateProcessingStateWithInvalidState() { + String yearMonth = "2000-02"; + // make sure it isn't in the DB + Response deleteState = UtilIT.makeDataCountDeleteProcessingState(yearMonth); + deleteState.then().assertThat().statusCode(anyOf(equalTo(200), equalTo(404))); + + Response stateResponse = UtilIT.makeDataCountUpdateProcessingState(yearMonth, "InvalidState"); + stateResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + stateResponse = UtilIT.makeDataCountUpdateProcessingState(yearMonth, "new"); + stateResponse.then().assertThat().statusCode(OK.getStatusCode()); + stateResponse = UtilIT.makeDataCountUpdateProcessingState(yearMonth, "InvalidState"); + stateResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + stateResponse = UtilIT.makeDataCountGetProcessingState(yearMonth); + stateResponse.then().assertThat().statusCode(OK.getStatusCode()); + JsonPath stateJson = JsonPath.from(stateResponse.body().asString()); + String state = stateJson.getString("data.state"); + assertThat(state, Matchers.equalTo(MakeDataCountProcessState.MDCProcessState.NEW.name())); + } + /** * Ignore is set on this test because it requires database edits to pass. * There are currently two citions for doi:10.7910/DVN/HQZOOB but you have From 6b8b90743e2349e90ee9f3ff9f4597dc572fab0f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 16:44:37 +0100 Subject: [PATCH 0961/1112] chore(build): downgrade DMP to 0.43.4 We need to downgrade to 0.43.4 again because of this regression: fabric8io/docker-maven-plugin#1756 Once they release a new version, try again. --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index db8b3186efc..1a538905a8d 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -199,7 +199,7 @@ 1.7.0 - 0.44.0 + 0.43.4 From df4838241914dcc5320a4ad81f2798fe094878bb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Mar 2024 12:05:58 -0400 Subject: [PATCH 0962/1112] simply smtp config docs #7424 --- .../source/installation/config.rst | 25 +++++++++++++++- .../source/installation/installation-main.rst | 30 ------------------- 2 files changed, 24 insertions(+), 31 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 25afbcc8fff..28b549ec765 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -718,6 +718,19 @@ To enable bearer tokens, you must install and configure Keycloak (for now, see : You can test that bearer tokens are working by following the example under :ref:`bearer-tokens` in the API Guide. +.. _smtp-config: + +SMTP/Email Configuration +------------------------ + +The installer prompts you for some basic options to configure Dataverse to send email using your SMTP server, but in many cases, extra configuration may be necessary. 
+ +Make sure the :ref:`dataverse.mail.support-email` has been set. Email will not be sent without it. + +Then check the list of commonly used settings at the top of :ref:`dataverse.mail.mta`. + +If you have trouble, consider turning on debugging with :ref:`dataverse.mail.debug`. + .. _database-persistence: Database Persistence @@ -2889,6 +2902,8 @@ Please note that if you're having any trouble sending email, you can refer to "T Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_SYSTEM_EMAIL``. +See also :ref:`smtp-config`. + .. _dataverse.mail.support-email: dataverse.mail.support-email @@ -2904,6 +2919,8 @@ If you don't include the text, the installation name (see :ref:`Branding Your In Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_SUPPORT_EMAIL``. +See also :ref:`smtp-config`. + .. _dataverse.mail.cc-support-on-contact-email: dataverse.mail.cc-support-on-contact-email @@ -2915,6 +2932,10 @@ The default is false. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_MAIL_CC_SUPPORT_ON_CONTACT_EMAIL``. +See also :ref:`smtp-config`. + +.. _dataverse.mail.debug: + dataverse.mail.debug ++++++++++++++++++++ @@ -2923,6 +2944,8 @@ Defaults to ``false``. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_MAIL_DEBUG``. +See also :ref:`smtp-config`. + .. _dataverse.mail.mta: dataverse.mail.mta.* @@ -3026,7 +3049,7 @@ Detailed description for every setting can be found in the table included within ``dataverse.mail.mta.noop.strict``, ``dataverse.mail.mta.mailextension`` -See also :ref:`mail-host-config-auth`. +See also :ref:`smtp-config`. dataverse.ui.allow-review-for-incomplete ++++++++++++++++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index c20b848e1f5..3c3376e3c85 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -141,36 +141,6 @@ Got ERR_ADDRESS_UNREACHABLE While Navigating on Interface or API Calls If you are receiving an ``ERR_ADDRESS_UNREACHABLE`` while navigating the GUI or making an API call, make sure the ``siteUrl`` JVM option is defined. For details on how to set ``siteUrl``, please refer to :ref:`dataverse.siteUrl` from the :doc:`config` section. For context on why setting this option is necessary, refer to :ref:`dataverse.fqdn` from the :doc:`config` section. -.. _mail-host-config-auth: - -Mail Host Configuration & Authentication -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If you need to alter your mail host address, user, or provide a password to connect with, these settings are easily changed using JVM options group :ref:`dataverse.mail.mta`. - -To enable authentication with your mail server, simply configure the following options: - -- ``dataverse.mail.mta.auth = true`` -- ``dataverse.mail.mta.user = `` -- ``dataverse.mail.mta.password`` - -**WARNING**: -We strongly recommend not using plaintext storage or environment variables, but relying on :ref:`secure-password-storage`. - -**WARNING**: -It’s recommended to use an *app password* (for smtp.gmail.com users) or utilize a dedicated/non-personal user account with SMTP server auths so that you do not risk compromising your password. 
- -If your installation’s mail host uses SSL (like smtp.gmail.com) you’ll need to configure these options: - -- ``dataverse.mail.mta.ssl.enable = true`` -- ``dataverse.mail.mta.port = 587`` - -**NOTE**: Some mail providers might still support using port 465, which formerly was assigned to be SMTP over SSL (SMTPS). -However, this is no longer standardized and the port has been reassigned by the IANA to a different service. -If your provider supports using port 587, be advised to migrate your configuration. - -As the mail server connection (session) is cached once created, you need to restart Payara when applying configuration changes. - UnknownHostException While Deploying ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From cb144236b5469dcb6ef23f78d411ab5150c64656 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 26 Mar 2024 17:13:51 +0100 Subject: [PATCH 0963/1112] doc(mail): fix some typos, add hint about support in new SMTP config section --- doc/sphinx-guides/source/installation/config.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 28b549ec765..207b6acb305 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -725,7 +725,8 @@ SMTP/Email Configuration The installer prompts you for some basic options to configure Dataverse to send email using your SMTP server, but in many cases, extra configuration may be necessary. -Make sure the :ref:`dataverse.mail.support-email` has been set. Email will not be sent without it. +Make sure the :ref:`dataverse.mail.system-email` has been set. Email will not be sent without it. A hint will be logged about this fact. +If you want to separate system email from your support team's email, take a look at :ref:`dataverse.mail.support-email`. Then check the list of commonly used settings at the top of :ref:`dataverse.mail.mta`. From e784eb33848085c7e10f736db60e2ae2e8d42541 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Mar 2024 12:18:12 -0400 Subject: [PATCH 0964/1112] point release note at new SMTP section #7424 --- doc/release-notes/7424-mailsession.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md index f67dbd6efc5..67c876f7ad5 100644 --- a/doc/release-notes/7424-mailsession.md +++ b/doc/release-notes/7424-mailsession.md @@ -6,7 +6,7 @@ At this point, no action is required if you want to keep your current configurat Warnings will show in your server logs to inform and remind you about the deprecation. A future major release of Dataverse may remove this way of configuration. -Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [dataverse.mail.system-email](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-system-email) section of the Installation Guide. +Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [SMTP/Email Configuration](https://guides.dataverse.org/en/6.2/installation/config.html#smtp-email-configuration) section of the Installation Guide. Once reconfiguration is complete, you should remove legacy, unused config. 
First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. From b8822b9934e4f5591b3d094fd1a7719dc2217d2b Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 12:24:40 -0400 Subject: [PATCH 0965/1112] Initial version --- doc/release-notes/6.2-release-notes.md | 242 +++++++++++++++++++++++++ 1 file changed, 242 insertions(+) create mode 100644 doc/release-notes/6.2-release-notes.md diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md new file mode 100644 index 00000000000..48903fb8b34 --- /dev/null +++ b/doc/release-notes/6.2-release-notes.md @@ -0,0 +1,242 @@ +# Dataverse 6.2 + +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.2 rather than the list of releases, which will cut them off. + +This release brings new features, enhancements, and bug fixes to the Dataverse software. +Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +## Release highlights + +### New API Endpoint for Clearing an Individual Dataset From Solr + +A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. + +### Return to Author Now Requires a Reason + +The Popup for returning to author now requires a reason that will be sent by email to the author. + +Please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. + +### Support for Using Multiple PID Providers + +This release adds support for using multiple PID (DOI, Handle, PermalLink) providers, multiple PID provider accounts +(managing a given protocol, authority,separator, shoulder combination), assigning PID provider accounts to specific collections, +and supporting transferred PIDs (where a PID is managed by an account when it's authority, separator, and/or shoulder don't match +the combination where the account can mint new PIDs). It also adds the ability for additional provider services beyond the existing +DataCite, EZId, Handle, and PermaLink providers to be dynamically added as separate jar files. + +These changes require per-provider settings rather than the global PID settings previously supported. While backward compatibility +for installations using a single PID Provider account is provided, updating to use the new microprofile settings is highly recommended +and will be required in a future version. 
+ +New microprofile settings (where * indicates a provider id indicating which provider the setting is for): + +dataverse.pid.providers +dataverse.pid.default-provider +dataverse.pid.*.type +dataverse.pid.*.label +dataverse.pid.*.authority +dataverse.pid.*.shoulder +dataverse.pid.*.identifier-generation-style +dataverse.pid.*.datafile-pid-format +dataverse.pid.*.managed-list +dataverse.pid.*.excluded-list +dataverse.pid.*.datacite.mds-api-url +dataverse.pid.*.datacite.rest-api-url +dataverse.pid.*.datacite.username +dataverse.pid.*.datacite.password +dataverse.pid.*.ezid.api-url +dataverse.pid.*.ezid.username +dataverse.pid.*.ezid.password +dataverse.pid.*.permalink.base-url +dataverse.pid.*.permalink.separator +dataverse.pid.*.handlenet.index +dataverse.pid.*.handlenet.independent-service +dataverse.pid.*.handlenet.auth-handle +dataverse.pid.*.handlenet.key.path +dataverse.pid.*.handlenet.key.passphrase +dataverse.spi.pidproviders.directory + +### Geospatial Metadata Block Fields for North and South Renamed + +The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. + + +TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. + +### Add .QPJ and .QMD Extensions to Shapefile Handling + +- Support for `.qpj` and `.qmd` files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. + +### Ingested Tabular Data Files Can Be Stored Without the Variable Name Header + +Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. As the most significant effect of this feature. + +Access API will be able to take advantage of Direct Download for tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. + +This behavior is controlled by the new setting `:StoreIngestedTabularFilesWithVarHeaders`. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. + +An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. [TODO] + +### Search by License + +A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. + +Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. See PR #10204 + +### OAI-PMH Error Handling Has Been Improved + +OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. 
+ +- /oai?foo=bar will show "No argument 'verb' found" +- /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" + +### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) + +The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. +Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. +Superuser accounts are exempt from rate limiting. +Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. +Two database settings configure the rate limiting. +Note: If either of these settings exist in the database rate limiting will be enabled. +If neither setting exists rate limiting is disabled. + +`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. +In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. +Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." +`curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` + +`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). +This allows for more control over the rate limit of individual API command calls. +In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. +`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` + +Hazelcast is configured in Payara and should not need any changes for this feature + +### Container Guide, Documentation for Faster Redeploy + +In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. + +Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. + +Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) 
To continue building the guides, you will need to install this new dependency by re-running `pip install -r requirements.txt`. + +### Universe Field in Variablemetadata Table Changed + +Universe field in variablemetadata table was changed from varchar(255) to text. The change was made to support longer strings in "universe" metadata field, similar to the rest of text fields in variablemetadata table. + +### Postgres Versions + +This release adds install script support for the new permissions model in Postgres versions 15+, and bumps FlyWay to support Postgres 16. + +Postgres 13 remains the version used with automated testing. + +### Listing Collection/Dataverse API + +Listing collection/dataverse role assignments via API still requires ManageDataversePermissions, but listing dataset role assignments via API now requires only ManageDatasetPermissions. + +### Missing Database Constraints + +This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail.) This can be checked by running + +SELECT uri, count(*) FROM externalvocabularyvalue group by uri; + +and + +SELECT spec, count(*) FROM oaiset group by spec; + +and then removing any duplicate rows (where count>1). + +TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. + +### Harvesting Client API + +The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been extended to include the following fields: + +- `allowHarvestingMissingCVV`: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. +Note: This setting is only available to the API and not currently accessible/settable via the UI. + +### New QA Guide + +A new QA Guide is intended mostly for the core development team but may be of interest to contributors. + +### New Accounts Metrics API + +Users can retrieve new types of metrics related to user accounts. The new capabilities are [described](https://guides.dataverse.org/en/6.2/api/metrics.html) in the guides. + +### New canDownloadAtLeastOneFile API + +The GET canDownloadAtLeastOneFile (/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile) endpoint has been created. + +This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. + +### Extended getVersionFiles API + +The response for getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) endpoint has been modified to include a total count of records available (totalCount:x). +This will aid in pagination by allowing the caller to know how many pages can be iterated through. The existing API (getVersionFileCounts) to return the count will still be available.
+ +### Extended Metadata Blocks API + +The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: + +- `isRequired`: Whether or not this field is required +- `displayOrder`: The display order of the field in create/edit forms +- `typeClass`: The type class of this field ("controlledVocabulary", "compound", or "primitive") + +### Evaluation Version Tutorial on the Containers Guide + +The Container Guide now contains a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container + +### Get File Citation As JSON + +It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. + +This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in https://github.com/IQSS/dataverse/issues/3140 and https://github.com/IQSS/dataverse/issues/9994 + +### Extended Files API + +The API endpoint `api/files/{id}` has been extended to support the following optional query parameters: + +- `includeDeaccessioned`: Indicates whether or not to consider deaccessioned dataset versions in the latest file search. (Default: `false`). +- `returnDatasetVersion`: Indicates whether or not to include the dataset version of the file in the response. (Default: `false`). + +A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other available version identifier. + +The endpoint supports the `includeDeaccessioned` and `returnDatasetVersion` optional query parameters, as does the `api/files/{id}` endpoint. + +`api/files/{id}/draft` endpoint is no longer available in favor of the new endpoint `api/files/{id}/versions/{datasetVersionId}`, which can use the version identifier ``:draft`` (`api/files/{id}/versions/:draft`) to obtain the same result. + +### Endpoint Extended: Datasets, Dataverse Collections, and Datafiles + +The API endpoints for getting datasets, Dataverse collections, and datafiles have been extended to support the following optional 'returnOwners' query parameter. + +Including the parameter and setting it to true will add a hierarchy showing which dataset and dataverse collection(s) the object is part of to the json object returned. + +### Endpoint Fixed: Datasets Metadata + +The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access to. + +### Uningest/Reingest Options Available in the File Page Edit Menu + +New Uningest/Reingest options are available in the File Page Edit menu, allowing ingest errors to be cleared (by users who can publish the associated dataset) +and (by superusers) for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). +The /api/files/{id}/uningest api also now allows users who can publish the dataset to undo an ingest failure. + +### Publication Status Facet Restored + +In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. + +### Permissions Required To Assign a Role Have Been Fixed + +The permissions required to assign a role have been fixed.
It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. + +### Binder Redirect + +If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect + +For more information, see #10360. + + +### Optional Croissant Exporter Support + +When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. From edb141fb35294423c7866e45c61fcdbf60859de1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Mar 2024 12:27:37 -0400 Subject: [PATCH 0966/1112] Update doc/sphinx-guides/source/api/native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index def894aec6d..5c34543d6aa 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4195,7 +4195,7 @@ List explicit groups under Dataverse collection ``ID``. A curl example using an export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/groups" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/groups" The fully expanded example above (without environment variables) looks like this: From 4ef47742ed9462d80ea13945186434a02567bdc2 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 12:37:30 -0400 Subject: [PATCH 0967/1112] Delete files --- doc/release-notes/10101-qa-guide.md | 1 - ...17-api-metrics-add-user-accounts-number.md | 3 -- ...datasets-can-download-at-least-one-file.md | 3 -- ...onFiles-api-to-include-total-file-count.md | 2 - doc/release-notes/10216-metadatablocks.md | 5 --- doc/release-notes/10238-container-demo.md | 1 - doc/release-notes/10240-file-citation.md | 5 --- .../10280-get-file-api-extension.md | 10 ----- .../10286-return-owner-added-to-get-apis.md | 5 --- doc/release-notes/10297-metadata-api-fix.md | 1 - .../10318-uningest-and-reingest.md | 3 -- ...38-expose-and-sort-publish-status-facet.md | 1 - ...sign-roles-without-privilege-escalation.md | 1 - doc/release-notes/10360-binder-redirect.md | 3 -- .../10382-optional-croissant-exporter.md | 1 - doc/release-notes/3437-new-index-api-added.md | 4 -- doc/release-notes/3623-multipid.md | 37 ------------------- doc/release-notes/3702-return-to-author.md | 4 -- .../5645-geospatial-props-nslong-fix.md | 4 -- .../8134-add-qpj-qmd-extensions.md | 3 -- ...4-storing-tabular-files-with-varheaders.md | 6 --- ...482-make-licenses-searchable-faceatable.md | 6 --- .../9275-harvest-invalid-query-params.md | 4 -- doc/release-notes/9356-rate-limiting.md | 20 ---------- doc/release-notes/9590-faster-redeploy.md | 5 --- .../9728-universe-variablemetadata.md | 1 - doc/release-notes/9920-postgres16.md | 3 -- .../9926-list-role-assignments-permissions.md | 1 - doc/release-notes/9983-unique-constraints.md | 14 ------- ...harvest-metadata-values-not-in-cvv-list.md | 4 -- 30 files changed, 161 deletions(-) delete mode 100644 doc/release-notes/10101-qa-guide.md delete mode 100644 
doc/release-notes/10117-api-metrics-add-user-accounts-number.md delete mode 100644 doc/release-notes/10155-datasets-can-download-at-least-one-file.md delete mode 100644 doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md delete mode 100644 doc/release-notes/10216-metadatablocks.md delete mode 100644 doc/release-notes/10238-container-demo.md delete mode 100644 doc/release-notes/10240-file-citation.md delete mode 100644 doc/release-notes/10280-get-file-api-extension.md delete mode 100644 doc/release-notes/10286-return-owner-added-to-get-apis.md delete mode 100644 doc/release-notes/10297-metadata-api-fix.md delete mode 100644 doc/release-notes/10318-uningest-and-reingest.md delete mode 100644 doc/release-notes/10338-expose-and-sort-publish-status-facet.md delete mode 100644 doc/release-notes/10342-assign-roles-without-privilege-escalation.md delete mode 100644 doc/release-notes/10360-binder-redirect.md delete mode 100644 doc/release-notes/10382-optional-croissant-exporter.md delete mode 100644 doc/release-notes/3437-new-index-api-added.md delete mode 100644 doc/release-notes/3623-multipid.md delete mode 100644 doc/release-notes/3702-return-to-author.md delete mode 100644 doc/release-notes/5645-geospatial-props-nslong-fix.md delete mode 100644 doc/release-notes/8134-add-qpj-qmd-extensions.md delete mode 100644 doc/release-notes/8524-storing-tabular-files-with-varheaders.md delete mode 100644 doc/release-notes/9060-7482-make-licenses-searchable-faceatable.md delete mode 100644 doc/release-notes/9275-harvest-invalid-query-params.md delete mode 100644 doc/release-notes/9356-rate-limiting.md delete mode 100644 doc/release-notes/9590-faster-redeploy.md delete mode 100644 doc/release-notes/9728-universe-variablemetadata.md delete mode 100644 doc/release-notes/9920-postgres16.md delete mode 100644 doc/release-notes/9926-list-role-assignments-permissions.md delete mode 100644 doc/release-notes/9983-unique-constraints.md delete mode 100644 doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md diff --git a/doc/release-notes/10101-qa-guide.md b/doc/release-notes/10101-qa-guide.md deleted file mode 100644 index 11fbd7df2c4..00000000000 --- a/doc/release-notes/10101-qa-guide.md +++ /dev/null @@ -1 +0,0 @@ -A new QA Guide is intended mostly for the core development team but may be of interest to contributors. diff --git a/doc/release-notes/10117-api-metrics-add-user-accounts-number.md b/doc/release-notes/10117-api-metrics-add-user-accounts-number.md deleted file mode 100644 index 566815d6e5e..00000000000 --- a/doc/release-notes/10117-api-metrics-add-user-accounts-number.md +++ /dev/null @@ -1,3 +0,0 @@ -### New Accounts Metrics API - -Users can retrieve new types of metrics related to user accounts. The new capabilities are [described](https://guides.dataverse.org/en/6.2/api/metrics.html) in the guides. \ No newline at end of file diff --git a/doc/release-notes/10155-datasets-can-download-at-least-one-file.md b/doc/release-notes/10155-datasets-can-download-at-least-one-file.md deleted file mode 100644 index a0b0d02310a..00000000000 --- a/doc/release-notes/10155-datasets-can-download-at-least-one-file.md +++ /dev/null @@ -1,3 +0,0 @@ -The getCanDownloadAtLeastOneFile (/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile) endpoint has been created. - -This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. 
diff --git a/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md b/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md deleted file mode 100644 index 80a71e9bb7e..00000000000 --- a/doc/release-notes/10202-extend-getVersionFiles-api-to-include-total-file-count.md +++ /dev/null @@ -1,2 +0,0 @@ -The response for getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) endpoint has been modified to include a total count of records available (totalCount:x). -This will aid in pagination by allowing the caller to know how many pages can be iterated through. The existing API (getVersionFileCounts) to return the count will still be available. \ No newline at end of file diff --git a/doc/release-notes/10216-metadatablocks.md b/doc/release-notes/10216-metadatablocks.md deleted file mode 100644 index 59d9c1640a5..00000000000 --- a/doc/release-notes/10216-metadatablocks.md +++ /dev/null @@ -1,5 +0,0 @@ -The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: - -- `isRequired`: Whether or not this field is required -- `displayOrder`: The display order of the field in create/edit forms -- `typeClass`: The type class of this field ("controlledVocabulary", "compound", or "primitive") diff --git a/doc/release-notes/10238-container-demo.md b/doc/release-notes/10238-container-demo.md deleted file mode 100644 index edc4db4b650..00000000000 --- a/doc/release-notes/10238-container-demo.md +++ /dev/null @@ -1 +0,0 @@ -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container diff --git a/doc/release-notes/10240-file-citation.md b/doc/release-notes/10240-file-citation.md deleted file mode 100644 index fb747527669..00000000000 --- a/doc/release-notes/10240-file-citation.md +++ /dev/null @@ -1,5 +0,0 @@ -## Get file citation as JSON - -It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. - -This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in https://github.com/IQSS/dataverse/issues/3140 and https://github.com/IQSS/dataverse/issues/9994 diff --git a/doc/release-notes/10280-get-file-api-extension.md b/doc/release-notes/10280-get-file-api-extension.md deleted file mode 100644 index 7ed70e93dc9..00000000000 --- a/doc/release-notes/10280-get-file-api-extension.md +++ /dev/null @@ -1,10 +0,0 @@ -The API endpoint `api/files/{id}` has been extended to support the following optional query parameters: - -- `includeDeaccessioned`: Indicates whether or not to consider deaccessioned dataset versions in the latest file search. (Default: `false`). -- `returnDatasetVersion`: Indicates whether or not to include the dataset version of the file in the response. (Default: `false`). - -A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other available version identifier. - -The endpoint supports the `includeDeaccessioned` and `returnDatasetVersion` optional query parameters, as does the `api/files/{id}` endpoint. 
- -`api/files/{id}/draft` endpoint is no longer available in favor of the new endpoint `api/files/{id}/versions/{datasetVersionId}`, which can use the version identifier ``:draft`` (`api/files/{id}/versions/:draft`) to obtain the same result. diff --git a/doc/release-notes/10286-return-owner-added-to-get-apis.md b/doc/release-notes/10286-return-owner-added-to-get-apis.md deleted file mode 100644 index b0aba92f537..00000000000 --- a/doc/release-notes/10286-return-owner-added-to-get-apis.md +++ /dev/null @@ -1,5 +0,0 @@ -The API endpoints for getting datasets, Dataverse collections, and datafiles have been extended to support the following optional 'returnOwners' query parameter. - -Including the parameter and setting it to true will add a hierarchy showing which dataset and dataverse collection(s) the object is part of to the json object returned. - - diff --git a/doc/release-notes/10297-metadata-api-fix.md b/doc/release-notes/10297-metadata-api-fix.md deleted file mode 100644 index 11ee086af04..00000000000 --- a/doc/release-notes/10297-metadata-api-fix.md +++ /dev/null @@ -1 +0,0 @@ -The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. diff --git a/doc/release-notes/10318-uningest-and-reingest.md b/doc/release-notes/10318-uningest-and-reingest.md deleted file mode 100644 index 80ca6be57ea..00000000000 --- a/doc/release-notes/10318-uningest-and-reingest.md +++ /dev/null @@ -1,3 +0,0 @@ -New Uningest/Reingest options are available in the File Page Edit menu, allowing ingest errors to be cleared (by users who can published the associated dataset) -and (by superusers) for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). -The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. diff --git a/doc/release-notes/10338-expose-and-sort-publish-status-facet.md b/doc/release-notes/10338-expose-and-sort-publish-status-facet.md deleted file mode 100644 index b2362ddb2c5..00000000000 --- a/doc/release-notes/10338-expose-and-sort-publish-status-facet.md +++ /dev/null @@ -1 +0,0 @@ -In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. diff --git a/doc/release-notes/10342-assign-roles-without-privilege-escalation.md b/doc/release-notes/10342-assign-roles-without-privilege-escalation.md deleted file mode 100644 index a4ef743f50d..00000000000 --- a/doc/release-notes/10342-assign-roles-without-privilege-escalation.md +++ /dev/null @@ -1 +0,0 @@ -The permissions required to assign a role have been fixed. It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. \ No newline at end of file diff --git a/doc/release-notes/10360-binder-redirect.md b/doc/release-notes/10360-binder-redirect.md deleted file mode 100644 index fcf5feea69e..00000000000 --- a/doc/release-notes/10360-binder-redirect.md +++ /dev/null @@ -1,3 +0,0 @@ -If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect - -For more information, see #10360. 
diff --git a/doc/release-notes/10382-optional-croissant-exporter.md b/doc/release-notes/10382-optional-croissant-exporter.md deleted file mode 100644 index e4c96115825..00000000000 --- a/doc/release-notes/10382-optional-croissant-exporter.md +++ /dev/null @@ -1 +0,0 @@ -When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. \ No newline at end of file diff --git a/doc/release-notes/3437-new-index-api-added.md b/doc/release-notes/3437-new-index-api-added.md deleted file mode 100644 index 2f40c65073f..00000000000 --- a/doc/release-notes/3437-new-index-api-added.md +++ /dev/null @@ -1,4 +0,0 @@ -(this API was added as a side feature of the pr #10222. the main point of the pr was an improvement in the OAI set housekeeping logic, I believe it's too obscure part of the system to warrant a relase note by itself. but the new API below needs to be announced). - -A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. - diff --git a/doc/release-notes/3623-multipid.md b/doc/release-notes/3623-multipid.md deleted file mode 100644 index 8c13eb1aec6..00000000000 --- a/doc/release-notes/3623-multipid.md +++ /dev/null @@ -1,37 +0,0 @@ -This release adds support for using multiple PID (DOI, Handle, PermalLink) providers, multiple PID provider accounts -(managing a given protocol, authority,separator, shoulder combination), assigning PID provider accounts to specific collections, -and supporting transferred PIDs (where a PID is managed by an account when it's authority, separator, and/or shoulder don't match -the combination where the account can mint new PIDs). It also adds the ability for additional provider services beyond the existing -DataCite, EZId, Handle, and PermaLink providers to be dynamically added as separate jar files. - -These changes require per-provider settings rather than the global PID settings previously supported. While backward compatibility -for installations using a single PID Provider account is provided, updating to use the new microprofile settings is highly recommended -and will be required in a future version. 
- -New microprofile settings (where * indicates a provider id indicating which provider the setting is for): - -dataverse.pid.providers -dataverse.pid.default-provider -dataverse.pid.*.type -dataverse.pid.*.label -dataverse.pid.*.authority -dataverse.pid.*.shoulder -dataverse.pid.*.identifier-generation-style -dataverse.pid.*.datafile-pid-format -dataverse.pid.*.managed-list -dataverse.pid.*.excluded-list -dataverse.pid.*.datacite.mds-api-url -dataverse.pid.*.datacite.rest-api-url -dataverse.pid.*.datacite.username -dataverse.pid.*.datacite.password -dataverse.pid.*.ezid.api-url -dataverse.pid.*.ezid.username -dataverse.pid.*.ezid.password -dataverse.pid.*.permalink.base-url -dataverse.pid.*.permalink.separator -dataverse.pid.*.handlenet.index -dataverse.pid.*.handlenet.independent-service -dataverse.pid.*.handlenet.auth-handle -dataverse.pid.*.handlenet.key.path -dataverse.pid.*.handlenet.key.passphrase -dataverse.spi.pidproviders.directory diff --git a/doc/release-notes/3702-return-to-author.md b/doc/release-notes/3702-return-to-author.md deleted file mode 100644 index aa7dd9feaef..00000000000 --- a/doc/release-notes/3702-return-to-author.md +++ /dev/null @@ -1,4 +0,0 @@ -### Return to author - -Popup for returning to author now requires a reason that will be sent by email to the author. -Please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. \ No newline at end of file diff --git a/doc/release-notes/5645-geospatial-props-nslong-fix.md b/doc/release-notes/5645-geospatial-props-nslong-fix.md deleted file mode 100644 index 4004bf38c78..00000000000 --- a/doc/release-notes/5645-geospatial-props-nslong-fix.md +++ /dev/null @@ -1,4 +0,0 @@ -Across the application, the Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. - - -TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. \ No newline at end of file diff --git a/doc/release-notes/8134-add-qpj-qmd-extensions.md b/doc/release-notes/8134-add-qpj-qmd-extensions.md deleted file mode 100644 index 65f4485354b..00000000000 --- a/doc/release-notes/8134-add-qpj-qmd-extensions.md +++ /dev/null @@ -1,3 +0,0 @@ -Add .qpj and .qmd Extensions to Shapefile Handling - -- Support for `.qpj` and `.qmd` files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. diff --git a/doc/release-notes/8524-storing-tabular-files-with-varheaders.md b/doc/release-notes/8524-storing-tabular-files-with-varheaders.md deleted file mode 100644 index f7034c846f6..00000000000 --- a/doc/release-notes/8524-storing-tabular-files-with-varheaders.md +++ /dev/null @@ -1,6 +0,0 @@ -Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. As the most significant effect of this feature, -Access API will be able to take advantage of Direct Download for tab. 
files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. - -This behavior is controlled by the new setting `:StoreIngestedTabularFilesWithVarHeaders`. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. - -An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. \ No newline at end of file diff --git a/doc/release-notes/9060-7482-make-licenses-searchable-faceatable.md b/doc/release-notes/9060-7482-make-licenses-searchable-faceatable.md deleted file mode 100644 index 1758fd4de22..00000000000 --- a/doc/release-notes/9060-7482-make-licenses-searchable-faceatable.md +++ /dev/null @@ -1,6 +0,0 @@ -### Search by License - -A browse/search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. -Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. See PR #10204 - - diff --git a/doc/release-notes/9275-harvest-invalid-query-params.md b/doc/release-notes/9275-harvest-invalid-query-params.md deleted file mode 100644 index 33d7c7bac13..00000000000 --- a/doc/release-notes/9275-harvest-invalid-query-params.md +++ /dev/null @@ -1,4 +0,0 @@ -OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. - -- /oai?foo=bar will show "No argument 'verb' found" -- /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" diff --git a/doc/release-notes/9356-rate-limiting.md b/doc/release-notes/9356-rate-limiting.md deleted file mode 100644 index 1d68669af26..00000000000 --- a/doc/release-notes/9356-rate-limiting.md +++ /dev/null @@ -1,20 +0,0 @@ -## Rate Limiting using JCache (with Hazelcast as provided by Payara) -The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. -Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. -Superuser accounts are exempt from rate limiting. -Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. -Two database settings configure the rate limiting. -Note: If either of these settings exist in the database rate limiting will be enabled. -If neither setting exists rate limiting is disabled. - -`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. -In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. -Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." 
-`curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` - -`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). -This allows for more control over the rate limit of individual API command calls. -In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. -`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` - -Hazelcast is configured in Payara and should not need any changes for this feature \ No newline at end of file diff --git a/doc/release-notes/9590-faster-redeploy.md b/doc/release-notes/9590-faster-redeploy.md deleted file mode 100644 index ed903849444..00000000000 --- a/doc/release-notes/9590-faster-redeploy.md +++ /dev/null @@ -1,5 +0,0 @@ -In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. - -Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. - -Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running `pip install -r requirements.txt`. \ No newline at end of file diff --git a/doc/release-notes/9728-universe-variablemetadata.md b/doc/release-notes/9728-universe-variablemetadata.md deleted file mode 100644 index 66a2daf151b..00000000000 --- a/doc/release-notes/9728-universe-variablemetadata.md +++ /dev/null @@ -1 +0,0 @@ -universe field in variablemetadata table was changed from varchar(255) to text. The change was made to support longer strings in "universe" metadata field, similar to the rest of text fields in variablemetadata table. diff --git a/doc/release-notes/9920-postgres16.md b/doc/release-notes/9920-postgres16.md deleted file mode 100644 index 8aab76e98b9..00000000000 --- a/doc/release-notes/9920-postgres16.md +++ /dev/null @@ -1,3 +0,0 @@ -This release adds install script support for the new permissions model in Postgres versions 15+, and bumps FlyWay to support Postgres 16. - -Postgres 13 remains the version used with automated testing. 
diff --git a/doc/release-notes/9926-list-role-assignments-permissions.md b/doc/release-notes/9926-list-role-assignments-permissions.md deleted file mode 100644 index 43cd83dc5c9..00000000000 --- a/doc/release-notes/9926-list-role-assignments-permissions.md +++ /dev/null @@ -1 +0,0 @@ -Listing collction/dataverse role assignments via API still requires ManageDataversePermissions, but listing dataset role assignments via API now requires only ManageDatasetPermissions. diff --git a/doc/release-notes/9983-unique-constraints.md b/doc/release-notes/9983-unique-constraints.md deleted file mode 100644 index d889beb0718..00000000000 --- a/doc/release-notes/9983-unique-constraints.md +++ /dev/null @@ -1,14 +0,0 @@ -This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running - -SELECT uri, count(*) FROM externalvocabularyvaluet group by uri; - -and - -SELECT spec, count(*) FROM oaiset group by spec; - -and then removing any duplicate rows (where count>1). - - - - -TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. \ No newline at end of file diff --git a/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md b/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md deleted file mode 100644 index 88ca6cf0e79..00000000000 --- a/doc/release-notes/9992-harvest-metadata-values-not-in-cvv-list.md +++ /dev/null @@ -1,4 +0,0 @@ -The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been extended to include the following fields: - -- `allowHarvestingMissingCVV`: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. 
-Note: This setting is only available to the API and not currently accessible/settable via the UI \ No newline at end of file From 0f5bff94bde9938ba2e032bd598119ea25dbf4d8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Mar 2024 13:44:05 -0400 Subject: [PATCH 0968/1112] Changes per review request, fix error handling --- .../edu/harvard/iq/dataverse/DvObjectContainer.java | 6 +++++- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- src/main/java/propertyFiles/Bundle.properties | 1 + .../iq/dataverse/pidproviders/PidUtilTest.java | 12 ++++++++++-- 4 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index bfb4b3ef749..56d26a7260d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -252,7 +252,11 @@ public PidProvider getEffectivePidGenerator() { providerSpecs.getString("authority"), providerSpecs.getString("shoulder")); } } - setPidGenerator(pidGenerator); + if(pidGenerator!=null && pidGenerator.canManagePID()) { + setPidGenerator(pidGenerator); + } else { + setPidGenerator(null); + } } return pidGenerator; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 2ea8e50a896..6d8fbe1808c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4598,7 +4598,7 @@ public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam PidProvider pidProvider = dataset.getEffectivePidGenerator(); if(pidProvider == null) { //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it - return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id"); + return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound")); } String pidGeneratorId = pidProvider.getId(); return ok(pidGeneratorId); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 17dd0933f55..4bb0659a7d6 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2684,6 +2684,7 @@ datasets.api.deaccessionDataset.invalid.forward.url=Invalid deaccession forward datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is not available for this dataset. datasets.api.globusdownloadnotfound=List of files to transfer not found. datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this dataset. +datasets.api.pidgenerator.notfound=No PID Generator configured for the give id. #Dataverses.java dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found. 
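For context on the error-handling change above: with the new guard in `getEffectivePidGenerator()`, a provider that is missing or cannot manage PIDs now resolves to null, and the dataset-level lookup in `Datasets.java` answers with a 404 carrying the new `datasets.api.pidgenerator.notfound` bundle message instead of reporting a generator it cannot actually use. A minimal check, assuming the endpoint is exposed at `/api/datasets/$ID/pidGenerator` (the `@Path` annotation is outside this hunk, so the exact path is an assumption) and the usual `$SERVER_URL`/`$API_TOKEN` variables, could look like:

`curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/24/pidGenerator"`

A correctly configured dataset returns the id of its effective generator (e.g. `ez1` in the PidUtilTest changes that follow); a misconfigured one now yields the 404 message rather than an unusable generator id.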
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java index dc226d2e85b..cffac741c78 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java @@ -388,8 +388,16 @@ public void testFindingPidGenerators() throws IOException { assertEquals("fake1", dataset1.getGlobalId().getProviderId()); assertEquals("ez1", dataset1.getEffectivePidGenerator().getId()); - - + //Now test failure case + dataverse1.setPidGenerator(null); + dataset1.setPidGenerator(null); + pidGeneratorSpecs = Json.createObjectBuilder().add("protocol", AbstractDOIProvider.DOI_PROTOCOL).add("authority","10.9999").add("shoulder", "FK2").build().toString(); + //Set a PID generator on the parent + dataverse1.setPidGeneratorSpecs(pidGeneratorSpecs); + assertEquals(pidGeneratorSpecs, dataverse1.getPidGeneratorSpecs()); + //Verify that the parent's PID generator is the effective one + assertNull(dataverse1.getEffectivePidGenerator()); + assertNull(dataset1.getEffectivePidGenerator()); } @Test From 24b4bbe89a20a73c8f13ffe80bc70c5b4bcdad71 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 13:46:07 -0400 Subject: [PATCH 0969/1112] Revert "Missing env var" This reverts commit 909244b63e167daf5e065b25d6ecced241ec9d42. --- .../source/container/dev-usage.rst | 2 +- .../container/img/intellij-compose-setup.png | Bin 52130 -> 45986 bytes 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 9f2b2648165..be4eda5da44 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -263,7 +263,7 @@ Hotswapping methods requires using JDWP (Debug Mode), but does not allow switchi .. image:: img/intellij-compose-add-new-config.png - Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variables ``SKIP_DEPLOY=1`` and ``POSTGRES_VERSION=16``, optionally select the services to start. + Give your configuration a meaningful name, select the compose file to use (in this case the default one), add the environment variable ``SKIP_DEPLOY=1``, and optionally select the services to start. You might also want to change other options like attaching to containers to view the logs within the "Services" tab. .. image:: img/intellij-compose-setup.png diff --git a/doc/sphinx-guides/source/container/img/intellij-compose-setup.png b/doc/sphinx-guides/source/container/img/intellij-compose-setup.png index 0ab73e125b2897176750ac81baa8c8a6223d3aa2..42c2accf2b459b0502d2fb42be5058e07b0e9376 100644 GIT binary patch literal 45986 zcmce-WmFwe(83yY84j}#Q=>6X( zM=5O=FfhdaKY!rKw1{|MV4uOH#e~&7^iS3RU(wB8yDull7H-yVrg*7>C_Yn-J7dy? 
[remaining base85-encoded binary image data for intellij-compose-setup.png omitted]
zqJE8Ax59jA_Q9!~+w=)zM25kFsW4~xuS=Gqi=b?eD@ z6m%9+q`ps}h$m@Nl?wzzGcNhA_K}fB(t6U7mr(zJf~)sD(A4rrI)xGUSO_vrI7+cI zl0>Je{0J0!ltB(DYB%lfx%l|l=RkqByDTd9_y<@jingUqCe{1)y5Hy-Lfa^4eN@1H_^4Gx{XCz$^3AK*S_f+ZH|nrCZ2 zzBILhit1lV%=blfGuBsrWFtUV(WAf2EzT|NJ!7pGZ^BrT?q5j@5nbNVYfs(^1V`eh zuAOcKk+W;5z&|oz{9KMd|>u-SQ!9JImpajMoEVLxt zp-EAUFL6Uw3!xVxHT#bFJwpFI(P|n`GR2L>M3GKUM(cZKD8rPv#ZE-kM5-gYvGlZJ z#$fCEC|feO3IT;}Ow!Q?Wx1F!OjJi4>g9W?knp!DI*rXh2RRZp?yny-q@>-2@oD6G z=*YM$K}H?*AD>FKQZlg&V`l*`7~DAYzh?&kDY(6v>r4TO1KYxi&BBQtwYwXDv3@wk z7bn*cEn<7wg(==3qUO-~6LieGMX{ASAo zZu8&qb%gJfOhzp#>A(S!$99lKjjzH#dF2zXo20x`jIz^|#mq&hF`~Q62VUs3AeTj# z=T7I@UKK(#CW-worl+=siSpZ{E54b{&LvfMnO|hh+E<8{r57a4E|T{5{OdHa2g|@j zZ{7rRrz%r3$(e)YDWMLjEzLxzUdZazMf1fIwfhq~l>ro`dhrO#>_M|sI9aQqhZJ1EQj?hJ3*I13@FsOcF zw$MS}p%r-FJ+l#)>U92qO0AKp_L)&_$0}Id+ymMYNTF2H)_(ETO9!9rp3Xk6 zgn8erFZ>E0}XZLxw%pp!h38Q!%VQEOGka)sPQE0@x-n!*~)gqq!ebHkn5$eec!gDv) zsgqMChr|F;gON*T`HRJ)D0{CRfs!ANY@(upTbFa{9HRAt_EYgnC>0GWS_r0-jy8&{ z_0&CqsFxXCE_o=eYlupGdcBwYOtb*Cgp4#`zfKE*GP4&2|KxxhyB&rwh`AEs0LkK% z5iI}_vc^_L2=b0nQvamhUw`uN_IYg<$m*rYM=c+rESO#*MHy`kR)(H9L8V?o-i2duc3Iuzpnf}QM@9u=*?3inMz>q zG=(ErXs%<>jvU~BO=GJn^5~7`l2&j?sHdMn&?|wn(e@QE6@F!~7#lmCUD9rPmZ=5|`mk2v?ar}Jn z)%{(kaWz5d2COqNW8$N9O}OWxBfPEtyvb^zob}ZAHqiSxQ{@#~-f?~3sgKV9LTC4u zI3^OhI>mPSt3iz1pcFaBwJG{`Zz{htJ-3_x51Kbt`l53G%}Rp{b9(7aa5V>A@N%|NBFkYyb~ukNj9B&fUBg!4g89zI z_}^mT)V@`vk}Q)lO!Rgguf3s9oF+i;Z;V|F^;>A84arh&a0za!MWEDZn17JV}V2!FtX&v;LV5LbJz-x zs9CcR&)C!4W9!c0aT25<_s17hMDM8;imBcmQu2zxCsnU7q@>hNqrMK1@b>FUO>)VS&VDt?gpzA6FcDVl;%P*ewa~_xy;3_ z$<_K+DxW|CndToB&-bw&(Fw&Z`)N9jJGk&^^9oq=88)ldUT{Hg5|<@Goae{di?f!b zL;OGF`a9ZtPmSly^?=B55X~NpusE#*PydWJ`j<2f5wvKur1^h!VjA7*;Uu0zFEw;S zu8TtMfA%=7+}1F>_5f>8ciz4o`mR{z>C;)=b-nx18D+0I=|Am$9=h(9NuQ90cDH6< z=g%Ie;EDo+>xlM$4}^c{##9ZR$c{i#Y{b8}2vu`%d)5%>@?Q&iLP@HJ@~KZZ({^L$ z9z=FT6M1?xLKL_-8J4o~|K9F%p(J%dI z&5*zP2JagNHKo``F+xXa>5T7;?m_B#f3f67rgv7QH@6_I7hQ3RNMxd#ru_oMGtbZ~3~+yriGVmR+78z$J#@3D;gXag!BFN9M;dqyTEbq2k`Y&#xHx!%m<@GOGr&nI~1D<&g zb1j&cC%?DwJ$V2Eo^Z~l)exV|WK~x=Gud4E2?kO6b{A=ny8gsux5Uq|D|)(CEN~ai zSR>D1P3xk<8-wEZb>)qGX(Z9QA&C!CXg1Y^Ui`}X^@I{}s)I4v+SrXG#jV+!3PNl@ z@m+Js7?sc>dVR=vQ5L`@qpcUrnbos>#mN#aMUpw4KlHB;2+g zgRl-@kh{J&_?fdKSVboDMt;FN z{N)2gkmN?0s^sZY+xx0{m-Y{lR9Rn{%IL}A{BJhdMPj-Mc8OQ#z}R&sCnB%_9pS0p zi;7Et*6QVZ_|}Wq-NG5UESikH>HERRCR6kz@6lpzcXyQID`qjaFF2{~4~CB&016Gs zJ5s+k#z><6TjkOpW%;7svjH;>eUU7$we8rOl{Qc{E@hXl>pcbCeo2=P`J_9I3eY># z45O_o3Id1G%BRTfFSZQaE zf$C&*8BcZL%HqD3T`-Q#`}VVdh=*2#E6T6F)Z}Y`F$@;zCsN9&;H0EM(3fv{_;;{K z8$~m;SBq9^pPIsUf5jXDO1pH=aZP|jkGs;@`%O7??I(OJjpjXuToZ7;mg7u;rQh;fn-%Sp8ey$I>c7~o z!4u(c`OOdGe;umxfw%mB+psYLcwW-Rm<$vQ&C^bp`|(D%PDCB4fe;)=x>_xY|R9s>>Gnl_TMVhgq1et4#82aBl3ir;p5NFsXjQCtf}~1d+gg?6vU6*b7iSwN#nr( E0UQjieE zKoELiVy^_RiGB#p0 zX1_>*r`WWgx|Me(n`t)~g8e_9vl1o9+bP6luaKMQCyS|TP&=NzG=8S4+Kl@};04zyAQi&DH*-D{4 zQx<}P6m6}YdhXKNu!4`(L@L5-I@_dO zQT^EuX%mig_!Z`N!#)!J5c`1g_nXLsgaFZ>YknWGlZ4;DZkC`CWOTUhId>V<&X~F9 zcMslLrVNaYarJVdZk7{gfZ0-!l9NU!B0&kLIw2g0Gs{z?Ekcl(!Qs*4OIRG@qMy~q zx$^}G89ouhg)aA%7f(&fQNWsDvigT(AZB{z!0emqKs^N1RT;1F!L)XcE$a#g@k9{5 z2+cn~aWPTrUQ6w4ImSggyFlTvoe;0#g)YMoj;Wk*=Tv;#;)1!nGOP7ec7UTyYpycf zXi-XKCVb_(IFOPY+VYa{PDa*0CID_Ng)KZs*D~oJWPjv7qa#{xNeP9Vh9pH4qA2EISsWgkoSgFXqesc=91m57a@ajoSKhL z__cLIO)%w5wU48LUY3YB@9p-V841LBgB-@I${sr4Dr75Naejd|Urfm)w%#k_e(O62 zJqhGAq59#N-*>w`*Dq$Tzo~1_Pn~Jmco@W<_`KoHGfxgz`@DBR(w;!n)onV}Kq!mN zo+DaKmP0Vwb9A$L!v1Y)LT#nWdqy5@;{GL~@KwG)lr+u0&Y zO9*>I6nGS31~n|x#6TGf2_bdbHrt`N@$I|UQGSB91dLrV`{C*Osmd;AEmj~fgPcK> zY?^_vWXMO2RXlp80(5e2`tTGhP1oMKf^bM<=_kTr*Pc=6nBs zx-6Nx9x`#U5*Y@pO*UafEQQsx2@}cfJ|EeoVyoC9Y?5G^@?5*VT0c{8`Ed`>vrTRe 
z?YUTQQY>l;4n4ax?e?BHZSWqLSON1aRHEq#x=ROfT<>}l?k#-bTYq;rUxHY6RGut0 zpn8Flva<4RUEl8N^5txA2P4vH9f_2b&4P_vHif0nuxLfEWO1Bu@}%TpTR4=Rzk}Bk zf2(xo0av$-3>)4x($@L2S7^zXF4JzS1~HNhKrNh*yGj zjsEf!H#R^S*TN-Zk8EtG0CEn!+r?$M)S(g`Q18FBe9mdzdPQC6ga=34czEVKZTHaJ z!u-a9NwvE;Jf}>K!6qk?o48FitVg?=j>vfM=*e@n4|i)0V$2oh6;Zj$?y7f*fMQ0| zOilS!FY5^+3irYF!?;&%2IQ=zx7K&?HoC_4ru{{FXjYc=ifsOiMc9n0LJ=smu0@2s zNn7?91DanVwpBahX$3R-9?3dgW7gzXn(b4nsyc-hk}!INC2k zL{%=FN;=*V{D>T&%%cIkozv{Ky>XVrOoB#N*9rG$E!R!kQXdKBLL0$+=^U87$MBmF z4`pXI`3K7#MTL8@Qv?!;Q*f6mt!acM!1d=TYur8P6NQ4{CMhXvp4xzDsF_jdJfgh~ zRNnp6WJ1KFE-)trlaf6}5V)h2nVEXwQe6c6Bare^-!bBhY#WGtr*$Ws+ zg73v>O}|NfCZqDr%0kilsrm2_v*Hq5W7khB7$HH{sVE|*D(Q2k|H<5f2ss_s4rXQc z^<}P@Cmfy#C8&CFV33Zh3|HBn1o}ds!=8D-*>pC;*PEYsBPhs^vuc3G;pvd2mM8v< zCpz$QH(F)A!~vf*1EBK2la0x3cQZg%;&Mf}x@LOUdb_1)<>88p;rtjiGdhPo)N=UL z{>*C;YA#X|KbWay(&+hNjd_R3<=K%|x{%F?V80C^5x6xjPd`60BJccox(8^4w$bTd z1F)`*^W{yLVv~q`anNHI54Pl3;cBX0-W$}Kl?0ZB0svVQRCHwIcj^%-y#{1-Y>+;w z%RI+UP)^O(1FEK`A14(Q42|}bV>THblV{5IXpM%R(p-;T8LyWfAVch_E%mI#>Q5oj z!w9+&o25Qyu}3^xvjAzd#>JsP0MyCoJiWw#+l_Ayc;#(Zm_V;5l88{r-cP(f`K$Q7 zMx_|%7j1LPELJ3W8GW{U$HE+=@QufXK$l~GZJy^rx)#$0Jh{~L>72)JrV_=*=vA*H zg5`y9b0a1z`zZ&&WAodys~@C4UwPALJS}nP>_b3|fYD?S`+gXA!@?DmL5{9x-~oan!#!AtmH;c4xSU3HtXs zQWKwn)*ahS)~g85%=&`>F&Od~Dt5eGA5Yad4&ife1~6O?29M^S<}r?8kH#(Q`>*=9 zvL8+>Fzlx@msx9QgJ7p5`N7(kej=Hmo9}gA(|zrd*2N{gJQ2JOOX)8;?$G6%i&r}z zXwW=BSCk1Nh4I|0Z@HNRTG0BPd&x6>n~>_`d&A*Omg+99wJ?=v++(VIj*W(Pu5!Op=y7w6*&*l1 z1Ycg5cw{#|Gjm@wn|Tbz8myWC^x1Phw#V47{&K^6OWj9xX3{Ezl@m&(`|5tbQwvOr z(|o;BzHm6YY^0&9L?$2z==`;LW2R1nu2F9xrdmh;*>t6fv(SCohD#e04)T4k8yGBH zvFYS0tIAbsaHDzyZqed5dLu27(-hp-QO9Mhxu7!Wg-Mb%D!_?Z;|5(&;eznEf336O zNASsRo{z~0a$n(FJNwx@ABQYFjQPpe76b!ZmBF<&?Yx1b%eG2?5Z$UV4=0R&sn^RI zyS@EJaivP9oTg^q84p-L@dZww<(SV8R;Jq3Q6C|hy7`f5zBb&oxZlRP9erdzA(%6l zfHxTZVqBTt1uSj_AN{2=CA#VN42ml zwJrLE6a|6r%;Djl%~iV*VYnO&A(Aoj$5JqaA}fBAo?j3z<9YhqmU+-En`4HbNEx5{ zL0WHiZ=WrfDY{JfM^E_kHW=9SCbr5MPRBfX?r*7r=SmI7(^d}9_(C2;Cml{vMl zw7JxinwKSg%d`WRpIJ4jp@@ylme!qr)$JV^^(TxoPF~WsiyE5=SJ-+L>q*UTyy|Dc-4%m#zjefKwW3D!HY&Nm zt;N1{6=ThFWjkLn`PRa8YN?~cl+rk?TJ0sRanQv&yUY%H`i8>MwozMlCj$?>*~)gL zc@nt0u?_2~s%O7o#N7=>5#8)lg%_QcS<}i7fyUs zZ|TVb<0)rbbsu?P zx`g0-Vn`zswjI@v5)pNqY^6hZzChQdUzKPQM*IRP$40OI{o&QC=5do@@})VdcAxXs zy-;PLaX3M{p3CN;LoJyW1synLRQ~V;y zSZ*M?%B4#1jI+X}{K+?Nr9Fn}g7M^ktcHzrE7<{In3zXs}XA5=zO=w?37IljmxAcbBmS04@0-t56*O3i_pph3@tdzvU^pq03HjF@w#h1{LAG2Z z)FJ^LGILcErWQ?iqLvlYq^4kJ)qck9%d(o#@O;hbG@4b@Vz!q>w)@%UWbB9`C=&9e z33cIatZ*(i#WWdWZ3h~K#^wRKjEdb}utcylfcBNE#xw>-V#oq4VLYGR1+{nqn0a8O zbd^Dz86QaT1xTy9zd+WpMe6j|v=)YZ-SrH|DNh{k$(XL7M$TEjL}PE{+tbxO?a1w$ z*r@6xIet$2`6)fb4*R1ro?lg02LZML-8QL5Kjy%x$hSRoqPK+HaXmACsX zl#g#llK(&e>dGgK7@W$*X=^4>K})K_{fX;`mzCX zmcjtpFy(%0PsK2Yheos>_Rob{#AyaKCx}rPv?=E%}SQ~DnJN)!0GDbh)ie=W&Z_zJ5%0=l&yLDdi zB{)R-Jf6;b&w)#FC1p2bMu*KCyyNIbQCUk!UEQ?wLv)OK)ac-8`Py!=cE83DZ%Ui~ z;#@cnt0Q(yzRcEWlrch1S_gZCITSOQm)vCUR?_RBGT-yPaQd-B;@a-nt%_>cD5gy} zh1j)~`goceIlT>*%R;*i@Ynv2z8zQ^-| z!|Nn~fZ+Mb{nlMjTDS8!jn>s>N?a!W8F~73A;fn&uln=d$b%4uP^#<9_A8uOZOp>j zh{-`!ZZ3bwz5Xr?%?qTxS6ngh-o=^Tps<>NU1v5K34(#$aG*dsNWC#qHa8CLqyu}- zT)5%2i)YT7Mg)jZ?PX8VU3N6GF?&>yz*T(50L7fbd|rhhlGOce*&L1iI;?2dx-fkB zfUHH3IIGzFoYIoV%mQ)-5GGEIpoV+2VQz}s6Q;&;m)4FgIAlqGH6(jdtaF`@FeBD| z-`@kZ|0(xJeM!`i@}h)FFQ@Xfgc{E3N{IUIet%fECTbDLZlBggS3oGiDiUu8=UnPI zbHHg(?$c<5?b)YNXf;qck-FyKjaC(>GmcOl(InvRYkA4o@13`H?Dw=-%m8ww>i)Va zI>1*lze+0ceP8WXI-tlK8}HjH5)P{D`_h-o4WX-1OXdsN6QkcXqvn89iKc7|4)rZ_l4I6kd;tws2Kx z6*oCE$DFmD&7Wy-@$)&xys3#Vp0(VZTZC>fieOo6f8q$2#dNS-f0<;@GF#Mb-?nx% zQpoFLqFCzlZMOMzrTPi~Xr)kkVfL1TJhraydpZ7&)7u$W9dG;jV&Yf7J`n)TVD?0A 
z=N)V8_>0<=gkIdAUR9>2#^)v<$@=OiW4s4c%#?E(g#xl>L0qHJD5#{z9EE<)TugUCr zrM>(ez0Co$=CD|+w!An}YA1{JR;YEwnpMV@axc%VSEqp8fcA-R%hVojX$dW|YZqI= zaQ-Tdm3^JAYgKmZ!ZJ^8NJ^(qQ0-_ci#=;D*X%;@go)J{7oA6KeZ)ijPQ^8Zhu{1A znO`8ud;I`IiQ`b#vF3qXf^A|&grRBI$j%sMB7PXvR*NaQVaksedld&bd6u40`FV2L z1?qylO+QY)>BxXa_?>aJT~GoDyptmQxhWKcZ7l8+gZS}8>=mv!=Mvv%jZTB#pf$@z z6cqXS;3z`yvMP}b=X=P?%BS`92SR^>t19j3+t8uQxQkgs8S5Ic3aRSwQ=*`2btAlLhoupkjY>1?sl`F3w>c=t3A zR{uIrw`r4o+3VFit_ayBuPNOg6VfA)^p~9Hs@R+CqN8Ic_b;*2r5Un;%<{!^C-X5) z%<~Hczt%bkEmj-a=Hv^*gTo1P?bYIGQk`M!bM?YQ`wppoF(i)v`XZxVx?Tm`Y(xkK zH4lVNC{^r^dTV|p7^zd4bWwIRU}X_W!)49!7c1Ht6YRzP+7?w-hBgva>E{kJQ0+-U2MHOG*vf0IQ$y`+lL1WKR`P5GL#}TU1SD7|>O9qz&NrWdIQ3UfDh*0PlaP{C z*k09PPZy~m4klrnraFa6rUI+MGE>$}o+D5~m4{PCZ$G8H0LXtLZHgSvHzmoXHR)Gs znANsUZx}w!$>wvYag}wo{PKK*GSe%$JZnAewbin$?t(N8|6pfUI~Nq5&ymm(Az92l znlA6;C_Ed%4F#{JulnG6=a{oaHIjL`H9xdkeDKHCFKZdT8f(HTtL)@2DeJG1kF1FiUeDI;_h8Q2rQ4M}rLMfNn`SD?YrjtI?Uk zp|L_H%)&6#^cgxiA9E=F7i;kfd)r5Q>#cx#v+$gsNJNpx$292l$vDRrsj7#y-xaVU z#y;M>aB>{s6dpaasdiycc2z|o>bx8@^!KE}hZ`H(u@GN10FO{YeZWb6%fAo%|+uD(Q{sY(}34w>fEffj9&!Qh` z5DFPmY70^_W*|Oh3|Zz}VoIgx?r49C$6_0C`Nj!cu$h8HAMKB0*U0v&mD=#%q6QR4oX10DjmK$Uysb|7D2 zPb5dGeWz{y*GjiYA8-nD!aK9%H}=A+^js-l|F6i_@=&u%=06(b?y zq&|T0A>t+dYb`Xigj5{-0*VP|5ZfOs<*2KM{xBf#O*8$szWdwl@1)7EC?Ao(eT&S( zREWorGD;Eam9O$A9Zpuoe;$l8`YQ$n^cD(nf%TAur-Wl7w}@ zfqNUHr$}0Wyg2==`&MnuuQ>qWXZTg#Yt|_-a?Kn82_@6!%N?kBRjx6wF zJsI@jJNbsROJ*{zflj+3xys40ljXsnID5vbnEj2vg21xtlYqGW(ty0Y(eNA?n#&=f zy}#IXLk;xTE47L>1;m5D)9nJ|BZJ|vz*gq?mG_qTB;m54S7i{o8ehLVs`%vUBhc`? zB(rDAb^haHeGnlRSMt5sNu$jSa|`E%KHWEsmm;WIV+*PFqkxG%s&R$Ci@_v3*CBoY&GqeH;3i)2knvWrIsi53d^=KUzpve;%z%hxDl~s0oUshKQiJTk ziCI6`S14u48*>&{tYJaewzAq`Z86`O0{B(e$kqM@xd^KJkh*X@?M?nz#>7X?*3kvTz2Q$<^WDn8MLsP$nRt*OAR(f~rq6kK&h zENVqbzuiV2-&J&HcviyH56d|U3}69fP(u^Gt@5&t`*%SeD)|sDjyEIL+P?@UsvDOPoe+`$rYUS65YNVI>y zD_`qpju_Kxht$iP-oJB^aF2N+w-hT?azk~w)eNJ4^TbsAOqQbng$(sjkILAw{9R zF+U zaP&HJP_q}T;mAGg--4K=Nh25W4jkv+WzFn2C zvsH=Xba77JeEtDbb1G+@^R(TOoRkZRE21{rI#aj0e4%|DHXiNq&Y^0%*0Mq7q#?WP z04sURiNn@S?jW7l5$ z$ov|@CIgw>#>+U9(+ubrWCidv=2PSn^(}hUq)}pP-|^6M1Ahd)V5V z+ktZ2xGl9gL>BHVD68|=lOIuvbOEs03!#-p^lr&&j7DRExFG2=@ox>0)aggvuDdrD zrmF!ud7nHO_CCvj+$p@?-*u0uv9JKZ1PH69Df+a5k-cvZh}DSq{@Pd?x0wn0@C4RX zsOUJtm~wSx%VY4}il*nI_z0jb)9P(D5#Yps{eB^qu16nj{R;FtqSWJcs}J2u)71j` zv~N;vjADpN9g7q~$veDId~ekBeV|Sl>*;!$C(@Ju?V=9_HKB`_KJSayoQU8UpUi9H ztj==!Wvv#u<}Q8zwI08Qr)}c?>&k$}HSSZZUMOGGwOr*yUhS%K{hm3~- zI7IWM0udl^K$^69S)G!WI|NtBJ*yPPOc@UOp>R8vKswje)5r)s^rZ37U*tHCCe}_q zT`e0B7;HISx;2*X!LO*l3OcegG`l7CXTqoE1&0Jb9LF5I{sKRTBJLIK6&zU@EM)ww z0N}P$Ab{00E+<`qUC*@vo;}yyLiW{|`P5M*w4^>f?`;$w7nt?L79KjRaHCgi1sDZ6CB{_V{InT6h+i43CH=Z>oOCi3`GcK*IrwHh)l2nM8uQ!4*E z)rHnAhV+?Unk$6OKI&xDED$qr%KFw3YWCWGEv2q^c!kbs%6jC)VUa(>=&Q!T@cZP$ z77oY+G8Gt6-gt@(Qr7|h@4&*H=%3y$6se=#cv2329zgA2<12Gr-dpMe*l zPwPze#a`v5R^K@uWldo3F9OLke2l;pxonxRsGtI zg(5bC+tbx4OUK*4LsbSWHT+NMOWB!TU(dMf`o)Dzd#5DhMk3ShRVPk!q3fi2m~RMr zwEY&vr7;b8y#;vu5%?4{8=wO|+w`rU4E;nlw++g^@T>PPas;sm4n*sa$@lmK%}RLX zh6`LPfhzU8ysXKuvMtA@0sg8G*7o+!;VUIidjpC8R+ypBf>!3-Ptqet8VT! 
z+da?m^D!H8Qa}A6t*-vFQ+S)SzuxI!pZ?$z<9k~NQcU?xh(m5q1re{Gpz*~HBByfE zpNiD8HiKZN2gV0>@5KQWnB9OtruhMCo0)FAN=F%a6$RE4zlfzGNA4;n=AyL@Ju&Gv zY+A3+er%u)Hint+V?3zg6I~B;$cm=OAS$Bo_(^fQ_}v`e2s&Ti^0`Rz&~b0cW6nYSzq~TkY>aELstA7U`JgPx+4uXCHbY z@XMjhB_vu2fY>NM`j4O+(!fc4rVmJXm_E#Y?)@7jjM1r@zoQ@3i+3R~uo5z+0umCY zt=F*>!9o%e6hYAN6jUWnDm47AV(gJgwthJ|{ylwt8PTUye@p*(kH-IowM8pr?BqBO zKYMZ@F~e?9@m+ZEgJEC?M@JDFENL_T%Z3~IK8pW2VStwShq;2pph15P|Ia};O5|Uw z|2vV;|FhCi#}z^*rbjeG;lG7#>!@i)_)lxOph#3m|Jmu}LjNhl`G3~^pR$DiuLl2H zrIlmun924h?df~L{=sL?H>PaM+^Twx8G`l#1O=oi|=sERg)|vI0JyNN$~iK`Vf+gq%8N9)VC1;${32oiTG7 z^GNlC3X$LBk@J&M--CA8U%Z`fR2f)Hk|U&PgAT)HBV;U6Z1+VGlCk5-%mprEuSM?$ zWjadhu%o8W2a1J7e=H>mW)Ne|vGo<}`H6Mta5mE#kQ3|t#>1Cfg#_h;JzkNJ&#*g1 zSd-3~;D_u9y~Gs4sR=uw5Ol;7D!sT}asRY~tC~D{_SmLgUrJOjNzg=wGq6yZ$T>V+ zk@kuG$8Y0$4#q;zy(r;RyaYPi>{>rdp#CD`aZeecjBNN^JaFL zmB^@0yB|(e!O~+!)up4nZ9S+qTB)A3grqOgg&Q5dq~NDY=Upq`j#)COF2j~83ZYC# z*NYTJ$^$nVA3m(@=`3QQh1F8CiA;rNs$<@foMg+)W*xmWk8z;n=$naI@qk?Q= zC?t=Lb=n7}6*OtBdwr=P#4yDXk$&-C_-ocE6VOm3pG+YPjkpw&{gDi;`h0NS#E6FP z0p_4wFqPV61~V1f$1mSF1K{7W@~u$^Tit+8C4qSAU`S#yhVyE2th?D&Dp=V+3vq^+ zHzsw4#()cd-= z55{k3Cv@!rn5>8qo1eXxR4IyQv7qc$ErF3w3tJpbt)eM)gNd%GazWf2JVykEq&&jq z_=Ohtm%>sGAh3*JZNExyW=*Ooc+|di9 z)twzczr2Rvgow<+Wg7m4+t9E<|i z6v+FWDn*j)25r^&s)S61V6l4>87KYFGer%L7vu#Wq+BG3WVe=c2*$P=R%{j2*NKSe z&;k0ll}*j0;h55(uA&NwN3lmSi8E#D012x|8DD={Q}<4avJ*UiW=D)Cb5<>x&Wbm4 z{C0bmyA@CVv(noMv-vMvGx9DZe>|{R&c^Z$@k5YW4BA&!JQmayTg<){{^%nrHCWcX ziRY?jM$6WWo#`^8-Wkn>y(a%Fy5j%^(9o4{why!gN2i7h*XCa@2CvY_;fanLHuUK1 zFxCd^N&{?9pE1mhdAg%K=ug3_2jKB!ggg!q`m9J;oFf|nTeh7N9W5XQE0R))<9QKc z6?Tg%7R$4dstK!74V#%Zv-5WS-al!Yb?&ai@B%0DDsE$N)Hyv_0!XVmW&99 zSg@o&^BkXkxC{{v3};hgbdbv5-nT;7Zm`+Ot#miZ)dT;c44X-4JVV9J?V#n#i5Ad@ zycNNN*0r_8ux+HIY;>kED#!7e(XlnaIbX(T2r9;jRAiMm6{T@?#AfQ8VMAQdO=XQFnXdn z#(FII%}8K*Vm_ot%xNI zPW(vz@Zi)TtnbA*mzcS!7(2*|s(scevlgp0O;1T4|em6*@y< zkj=Bb8=uy6NBaOB>FS<7x5jE&5L1En0xqR=7z${v0&N;JU{>&jaUnrLX|aAca0V|Hgp zbuwDDWl3BEYn4c~JEDHYnIiQEzV<-e;Np4yFFGI-RkraNiN}Lo8-aaVvoy!3lDf|8 zH{zOHO}mh}ES7gnx6e#?;@wdD9#6P*4HvA*^%7;aKb#+aZTtBmcc6U|_cI{L(x*+L@yN#N&b837O{E1`Uueckc#pS_Li|gpC*Vygm>}MLDnwoQ364L(Edn))p zQQ&6mM71|zt0@e;#*5I#!^z;h5tdrzv|r->7bXjpJ1u1%Ja$*8x)>-5eOXiBL~q*)mNE{2!*0N~mY9w!AbOr5 zxcUy*rdAT|=E9H_cC+R+&XxL4T+1{)wIUnf|o#v+`f8F>pG} z%P*8RCFt~tO15j)-f-@SV@bbtQSwD0Fvp>Vqlg((%G{C~b%F|$iGV~&$Y6w}0 z(Lq2wmYGp7c3>wi)+vYaK>*J`V&9-adcA@5ZYq$G&U4_)cJz~)MvARYHv9V< z)2Bz5G?_>Q5my;`Zz@`KHC%Dpfk6`24u&9{@@UPL*5|mNtKQi3YvdDrc0Scgt<^oE z40NbLRnMz$8;s%qv#*X3_J0VS_FIe`K-Xr#PalZCNs+TM42zfSY`dA?kz6u~RpKy| z{+p5znyDfd|C3?q3Hmph;r}av*zOD@D)`sQ5|J;Z2#9;9s+No4{x<_djla^>|a}MC080l%0`ha5V_5}@1VA@eYybHy)OaTo7537pw zJ)d5FPxCl<_qiy7trg43HUc@IC}G|m{|8iUH<|bTO67jfdh&l(sp^X=oSW)=BSqiC zg&=|X{Uo{Q_b1gw$Kd=vO_J)C!VZG;>C@Jew!#Dpysuze`aAQIiWiR0tGM-ai6Ngl z_hF?&D3ull<`U%ZQIiAG_bka)wLliEnkV4D_?!RKu}L#@WB6tt68M?J=dIH>LbO-6 zR#l0{0TS&fsBboaXDiwr{A#H^;hmHCwuN_$O z<65n#{>Zs#!1FhZ0S7Di$*vLO_AfFM=@~biGsitQr z+R#@mT@{R9jUOf;=#^c{KObj*Td&A(t9EVrBH88vS5H>{ew^TKT_7BOWwUq7X>0ae zO=6B-XGEu&$MU({n6sRk_O`?%pm;eXIB38RoQ4!@Te&eIn3e52gE$*!UC{@ucZE8N z!m3skOset(SkOik278iV2ZyLSmGqZTt^*BGXhaPS<7_;>&1k6>aYp;yGo&_R zZF5s4D6{S$OO%$aT^wKw5q(s`-6?}A*k$M%#Sadlj@J)ciZ@j^*9`xiykU)*RYs<0 zUeKT3T53+T=#=4m;T36As!SMD>+b{`;c|NH=Y+d|01f(yz|KE-2!SzyhaiWc1mbCYTR5bm=Soe~vso+Mw9 znvF{w(d|XGU7)^aCO<3cx?LTXM{_V?DHKvI9_-1@8oB!yFJbVpoT*oYf+;ISGS8ml z>sKF>OMkYxFD@EjB1SZal@b0#OCg@D_w4McH;&dIoKp@uapb2DlU%<`AU~JZSn&GK z3=nww3svo+d)Xq-JlOWyRDyUuT((}gTzVXT;FZU$8-61FR`Rjp17+x<(f zUc^#Uwp(L9kdR5L_VQw-LFb*|yqgbfV|~yvHWf=i5Vc5-+oGnIhN^N855+70WS`@ zBWzFVYu@tbrp{FRQONywl=xXL{PW<&>@&+oLV7BQ6d0i8@{d 
zS2~EMl!li%Ie@@B(mf+ZQZl8Eq)54BX4w(ZdF=w6KS}&@9^R4x*Ll}!PS!Iu$bM=t z`HcYI%jH9L_zV&ixI}ZGN9M+Fy056TEJ$j`PcA31zUrdlEE#iAM?l_^aBYNhzu1Rs zI`L+j2}a61z<=)Lz+9&b*q-;GQOqhqGoi~jBvFiU4OM7 z>X{3gCFB5XzTX22OS~Hb_gdY94Tt~By;c9^-srjp{YfGaZt(GjBWJ8eHn==S23H*>CUb~0C{|G^4 zvgEc~3bXaYi}nksxfH(}8m)n#*{2N3C9d64gmSN?rdR-0YoudFOP4wSG0B7v&+ko0 zOKlGyrPreKxMr&$bD_NC!~t;_BF~HE4vGnN;RIM zlt!C1SSPIA&|BRH5LZl7`T!t?LKrca+Me>LyHhbEOHeiIx==Cu$Jq!KE|0CASe3*g z*=Bza!QaOurh*8bQ?rqxrjqV0`K#+|5WWQl5)pZmH|~^WlCuYR@GjnYInYFLat~6e z@z4HKw5<28W2SP(SlBr#PX@AwPJq9kgL;KibHi(Bd_)H6Z@J2H~q>y2tp~~7K7k&_He{!N8zLqxcNA%H6~X# zq8onD`R~&FBAv62!CeSyhBe*Zx;(D$yq2&ja+p*Z8 zslktlb=i*|lI;@OKPNJ!?G@Ed&k_4C(6<#Z*L*~XacMm#QzK2GR#+sw^BH8@iusK}hJ*VP&G zc)dDTc2agElvQcjo$sH@xW&7x%Jk>q0Nc)JT5UJ`AbCM$7)NF-nIou7`0as9N8Sej zD#~rbc%WOIX!tSzYx^Y6QxE1;f2fJVugh~)`_tM6FV;NR24@<*pjQqsu6&ef&a`*I zd!f^Pw)>JtJY__jm3vN~}5zco^s;-ZWmqYRDprkeM zY7zT81WfxYt%12?Udt{j=0a#4%aMfHbfgA5LTsF#XY}^hTYVd)&?+GIfHId2;XXv` z{pK@AZc$Pv#rCuTC@A{%N^$?loFD9tjKeLy4Hv^6|LNp)U3>O_lzpdhkWrr zzd6!RgG1_gLx+vF@i;fUx-}Wn0ebam^}ZH{`Cmi?Ax=wHNj62Jivc#aVO|WYA$^A->6HXz*av&i!9mv)1|9ih} z%sZuiG=ECESy04Yy&wY6f0IrX6=m$VTm2MqaZW0?``QM*^i}6;QJBCtU~D-Rwag|a zfEjX`AsD$8=bfSrDRUTD>&bhz>nt;;&S>TH-5l20n;z%e+>%6$s(2g+e9vlqkl!VT-}wAomgad}}f{U9FU>2ZE`=Gh(p z0ayCJ(OIhnpwdPN0LxVm%y6}8B2#-zoyK(KtQJlb4qh)Vs=POdE>W}RIlVbu$$3#% zg!0>Y$5%YFc9?12^BVA9zjNIW-fm$ecdb7e>$PO^#gH-4Do!*<>Xq^@w|040I;NiO z@ZC0@+qGTu=CG z9$!^gmnRZT_#1zOTH7Dks)+xW-1dz5t>UZ^loek(EO#$$oauH`1otO4@v@)_rE3-J7iV{igJGr@ z_Wi(6_`%?tbcMFA`PDr|jMq{lhv32Yu>Vh4ug=|S$q4dGea6D}yW^s7>7y?cCsVuyh;cWY@At5i9 z6K0_BHo|?hf9m+baH)6=sFbQX8uLqDb@ZEBO1b@K%t8X4Dd4M|TSc`k*_Cm%XRqwxkyhfB5yBUi9z32ELnN&8F|4#A$ z{BtAnFQH)St8iTpCoN~A{`*TFdJ0vVhE$c6L(iN=1*ciyARpY&|G)_CE}J!`o~A)h-(ipeP**Is5N8kHaU+ng@>f59kH83Uik1YUK@0Oh##>fg0A-|q{>lK;h0 zV(HwA66l_mYQBbiHj$bM`;kRu^mYlAwDYReI(0Y-_5^PND(7SQla|W`5&==PgZYA2v^9) z-{Tt|MX0$MnsdaJ{J+?H%cv^bXl+zQR6>xF25F?bL#08wVUd#3-7J*ulI~VIq`SMN zyFt2h!MS%-wm!H4%j3VKVk1kJWY`I6P2XKWg^t$e5YiL2c`3p68X$d#lVq$@q%693;zTyINJ}>d(91$Uici@VF%wZx_ql$!)5xIFwP6`SN z>Y(qVCLJbc3zLR1spW`j;^MDB2QJTDJ?xY}Q)1-jXXevrm%OZB_%^lMX~3eK^io+F zcKehoMFzsa`NI3JVAH4xo%H4O9gg(4P5lD+x*78e;;DVdHG3WymfBSR2e*t!ICrHqE#Sn*-G`J}^TyDW8O-4tD9Np`) zVA;K6ug`|ZjDxI37dtjl$M#_J@p!AOkPlOYf{%~DYjss0df$HlBr*|s_l*^6F3|G( zU8QT+Z{Lf(=k`BMosU^a3yX@To8pOxh}`cgD|07)reu?59E@9KtSG;}487)IqDLP5s5HpgV7aeL?HyEuBKs*uk_xi~K_I#_NaS zef<0;YElMO+WN5ixoodSB#1%Uv5hE@ElZ~QueZ{MV^p=JD~zPeX3PiD1jB3XwxZI~ zhR>&#K8)u|U!U~ju5WB)?DD8WY$!Z#&WyT37?~32AdWMJpHpr~5_4_lEUHJ-Ka%kg zacY;u@w;6dZKXjYA|q`eyH$(<-#lS2tgknOZ0fas=1a=RpzKZ-ec9;4;vzs{lV0<~ z7agm$qw0G>jf*S_w8aEI7Z%a1J}svOX>)Tr?GnHK4gI;Q{ehI*mttYr<5^-(#SJGk zO?M|!CDd+WviT)ZGuz{SPo{_Rnd2czzC%Qv>KePiCt+YnB@trC6bmPM?7PRUqvyQ~rKzc@bEEd`Z_{@A>iNEs z=KHpM<$_lT2(9~$U;*O$p*H&o-d}l76sV%TRe=%zD)-6(V~I@2^TT4U>U5A(=eES| zaM1&YV-FD>14FIZlW+<8z|YOijdfrH_Cf1-{Utn|_HVDf!U4SMU|I2O=)=dmBiIy` z$3Wd>OaZff!x>L;^SvLU;FR`kWPJ3Vi)v=Q-RasTOZqqal_}+-ewomWwH%=xbe%y* zKgu&g0k2q+^R(`0JB3WOZ-jHGlsZmnG~Mm5-2Q~`AgT88{N&tzR=|XzjbYL4*!$IF_?qC zzJASmloU8R^TE?oV03hp@9s#A$laPwH0v9f4?h$_Q3!;t&eL36R=uR_HRpT_ZOP|! z!~WR4r8LoK%1~Uvdo0nAnipDAT+T;N;vlxnS)!qBz$V+)nvA4B{n8yq!Oa~gkdELjV^4P(`9dDj(AJS@!_nhAaI#6hM>%#FFQ{-IkvXF6X)5~uBoi-3TDiH(hh zjomMV%0J+7x!~Wa-@10pV@}H|MqXiyVK?rzfDG? 
zz&isY$*kH`K$5%VnicdGpRtiGRWuZD^|)qdG>aE@bKVZh|JZWxEYxE^+)PQG?M|WA zFUTx4VUJXrVMDEgr*_XT$tlRmk1m@ZQu-O{efH<7BV%F);ZXT{$in%+=~o9r=Ps3>&8;;#2FTb=XC#j!JOIH6m;2Yv*K=7 zXB7PM0_UZF>N*YGbOz6u2{sbxHC54MUT9i+zV(RKo?k-M|LwY747<2U!?$B!dm9Zp+c5~oumf4-xpFd)pHceDfo?g8=fYL@l;90&D>0*;3=8*^68UHgs|&d2&h zZhP|J_|OvoJ2;>Jayi{xUtec(_^Z@(^H=JVkPt5a4begKgC~#uwpaVgJ3+zbPZULT z?{;S^@vE0Sd7B>uf{)j_6_u34?d>ri?zW`nUA7j?XMZP9NX0Ulj*%ZUIw4mW4;xM7 zD~Axckgcq&fFWN4eZW9N`zSAuw(Ry?d`6rAV3S^h)hH@%`x%=J&i_t4 z2_fW-ZS`ijHekr1-=M|>NM$_#96$eKcRhs*EV4(p{fP0Bzu-d^{b5OT?eF%L3 zpvVY}eZx`PDJJInc&Xl(MVBqD;+j>-w+xT~@XN)9>xQdUB(Jdi8^2~>KR*`hr<<4ZNLT+j#!rEP4`?6!ssS33fQg@s3&!NHl& z@h!gC+Mp?$tHF#bNmfl88*vZ5pk>P~ANj7=x$xLbdcffH4i8^~;LzXvg6+jdUdE=Y zXTU28PdU`t@3iqAHuR@BEp~_m@Q2 z{n0EhS{5qd!euoS;`;yPWkjyk9eizJLJ^no1@SfZCpr z^s#B}k685eH9X*vFHi`4rgljHkekv@`kYLStHJ=~ z?#N*@&c{O=KMi{=SQ-O^4)*&ST-=?xPg4a6WV+;ZbRngs)({3N#XQd*Fu~$WO&$r{ zr&Mgx?dBX9goL4;`T)YmaMUkolSqrmQ`6BI^v4c5^H~DCSHPEQrR^68_LzRfOgx7L z4F!b=IR%By*|vP{=Y&rs+Kt;|IY@I=7Hhyx7y{1~Lgax8s>Njp`JYUvvQF37kTEke z3yX=31AGAG-mbH9RyHzv(b?JgIfW+_coLi4qUx9n$17K7(J+GEjW|?8aGDtt3rjhE z8=|JxGFxc|;CaD>q@*MjgaKPI*F9k2;2i@)SaLFf!1cOfox|?OGy#w4Qaw1a@Z9li z35*x0L~WBrn!P<-ebZ%M0>i>6d3f;R`Z>ef&8u{oa9*LJk^%an<=+*GKMvfd=hX@? zum<4pJ~f9X0zjc{mepSX?Euv*H;ro`mMFvm17P<6Gts6iH|p>@xO0`*a}!DUeC$cp z6aV&)&7LrT58fptBz#U2NCD+H^*NHtG{O%R>}w%(+6@7>hfpujoHEM=9v-KIzDJf( zZYb5$)6-&J1!&>Q3K|%kxEIAuEBc0);_4+o7PQ-1y~%-d zN%m$c{9og9SUVt{wRFsr{4!e`gU;hl+Z(9oh_pw3z;5;OVEDQGH&cPU{_8!=T$dsqV&x~UqLRjFG_SFw*iZ`>TB$Qgs z;l@NMt)8!2KIe@d9!FeIUP3Y3vlJPOVIITG{gWVA43X%4hOxjHu--Ea4ht424bYb? zM}mWg^x5&~QkXYGRxZ0mB{Ld7UvgcPT_SC6+yBM&XXo(yK67{y$`PXJD)uEL$ouu- z>3l~m!9>fyaqSp}N&US^>tuX^ts*B+iV31^HJT#-2SBdK(evipr7Sp@*LlLG0XUofwjWC%fze(uEC5)H z(R_`Kfw3_LF0OiKhq;c3;QRN1r$fA`6*B`pk(~M*n&MJYNHv)e`T01=O@V)AW@h&0 zs>1>Ldt9Y6=Jq>dzHvI9xZbS+qb9nCi);c{y6Avfi^!XdWixN3I1!bVmCbwJ?oaQd zTpTP&fqfp%TgBK?Hj@4k4uyzMSViS+WTZSeFH%) zQAQmurbvqSJPQg!G?}kKOCSFkdB_J4WX*1I{d*c3QhxpvfR96d|Mmcm9|H@^H$NYM zGkIV!z-Bca&7{cBXEE&a55d1A2M$UWG`=5@)+^xYL4^cCG&D3EWIx1@$tClz!3mQ@ zcC(=3-MyveW;U+Uu|_vkfO2|zxFn5?D2*!MtLKCO1;^I1fA8yi{m5j1f$}Eyyh@FV zLMK3>0+?~56Z;{scoz_B$Nkw(K>E9Y*}4WCvIgv*n&t0EGnHnX!Pt!Y2lIk}4f*)^ z5LDyE_8RE#&sI+FJ2KQPe*zv{B2OkIAmG)Pov|FyCbmH#V3IBu-53FXi~mdslp@7E znT+)GCt6dciYP68Q)MwXbPv0|-p&fgVcisX^yI)~1M3SX2s^`2 zS63&eqPlgfTwPlOh*rNdXya!-(0#;rDqk6cIeh{Sr`-Xxz9Xu|#9%3e4$8-kd!iHHZC{2SwwP(U!*LIM&Y!-Hq(kCC@^#VUvk+-ti(>hVcGW10Pqt?T81$W*Zw zpv6qlRL#QxetwdWKmhidjGjKUw3G=bCg{1+Nh+Nuzn71%y7qvZkOQ9RlXyVP&!5yr z16RpKn$;@x-xZ|8CZs$H*seX##t;h*2g?I@$ixgwu|GoSfVQ;O4) zM>QLOuFA@W5BG36jc-(*%#}==bMQF){hmoZZ$B>mD5HRNs9)*F zWHGB0M!*de(c*}ylg3X?i!XIbIx0u8{w0WGUy7Gb!_ zomi8*z#P63P~UXDc2S_1KAM1F#wM?E(>(bszj?sCg+=`y0fQ&(oD&FXv=8~ZhEo#b zVOosdt>QK|Bv#EZK@Q9L7Ias&QbS zh@f{ifNp3KpctF#S{RIGihzE@m8c3$eN@H*nlw11oAc2;{sAwJP*E8R?0WS^*ViQt zCy3nK>VSU)1CYkZ>$qnCUfI3C2(a|`0Qj zrj<9F^V9|}8LtkTHVF#;PbD1(t5RJGx|r&8kc{@EI+Mr3i9~|0jPtQ+E~nitO1Hxo znh-g4O%~FcHYpllW_T!1WO;&{65IbHM2ixP6zQ;3OL>-xE4+uG`@?#C>ZnvFzDwtI7M0L8BG!&e$4D;mM(V@ll?yy(xi zSvVn!Tep;e<&lEP06@753Xj{7}>gB$gSO?pGgOM#)G8wtj# zdL03~@tjsfL75TJ(d}2qjN`Q+fC+dm3L#GiDAV&vSlCC9UhAdz$B)Xd%@^uIv1r{3 z1PHmTy}&wBz9tAi=~KBoapnW6G8!J9BrwF>?pJRZ;(|v;-f&ng4geRgXh|KWpn*^g zid%!)>tsMZg?Gm|a5`M{%_Zb5p31nHuOl(*x{Klg)I@M~v7b1Q-w&mau1tz*`q;R;Q}!5bC(@dwdJ)HOMp_ zHv$Ty(QxYEYG-g@z$-xNyO@CZ6icVx2Z%-ivN#Bk0FSG;H|<}u8boU{S%@nYOGm=V z2_hZ}v(ZMzC+oe1I?apFG{Ch0UXfS!-RO@4Qeqs$Wg`ZeSRffFdyxqV5@1$<5(&0f zG?+9rEG!8iGR8CMJ!^D5*9TI-{Yn7vu3!d7f#_tfm6x6Uykh1vFu*CiPM^l@Sy(Fq z!oyD)vE8>vNPzI5mFlv|t)$#(RB;Vg2{d9-G9bfHUA(rktLMMZFjCfHR8|I|kCkx2 
z%~FTm$GFa;XpRjTdBki09~I*rasx+dT!N7-q%e^GYxu}f*-re4JrzoKnQrHrjdwHc zVsGqqKQsSyxQY$|WgV2W{4IaCCu8?Za%UU2Fqj^za5s8FMRcKU=z)d`3FG{yB%hsN&)oO{0V&(SnX{Utbi?|_|JOQQN29r3i{W)XcaT9a(nDFkl4jkP~Ml<2FL}9N* zjw>w-@+E>f2b70^8?+H^c5Y=-429q4(S4hUIMMhAivLOT#a zVm2D^c=TjI_=-+W9spvB?~J+rN*g?|CZ|A`DF;5EQu;=ag_RZ7UiRse2i94+d)%

    6ClIT#FkocsR_bd&&*_)f$|cfq<9+!X9^kK>%DCB8maRHic?s-{Fxq$D3d- z5SvacpaT+vN165_^iDfLYPw8Uem~y_eFEIJb~AB>?{<$B1jYTpejE$W{Zi@oL!i1{ zOHZV#oW$~l_ct&K(LfI7zr6A@q}Qs;9)rVe1kU}@H_3);TMi)sV4Vh*v3GK^IRG95 z7S#(NVqzyDF1`%d@D`A8K`vbL=FtjSM`zAcM3sUG9To7kK*Lih(�n(wWg((VVM zCx}Vr0aYD{gBY|bbX!L&-T;n`^XC^U0OwS07klrujt8WwEwyU*%KK@N zXHJUFD&dj_RWb-LJF#_R1{;CyP4yTV0fZlHW6|*j10$oGl8VP`E5tl5eUzxEk(%rO zTSC}Wu~=(Tnv@=d1>pskMxA$Jyl?|=9A6U3`Bn7*_{D#wrYYAK=UvB5#P~9Ah<2ga zu)unG)gm@tR6Zrs-u*bIk&SDuLy?@_!GwwE!LRWZTo91FzBNZXK!Tw5;(uEpmA$$& z?Uk~!O>MIIVnd)=Nn`Bg5;P5v@li04KmzINCWa0G{2v$`Od%kU+S4OmUQyw4x!`oY z^su;DIYoKWLxu{62x1TS{;#^ayU{Q(9@#}kM#jCK28aeX=gZ+%&+Vt_0Lfn!1TeyZ zd@tYbM@~t3JSs+91{U=gmVe>d--#qtfvXW>>KGs>18cSlYGI0tle4fO7EJt2`+)jG z=7>EiIk_WRHjIps5j(o~@?AGze?LG_<>a_~yw+*J_5dfr-$ zja}So?)DSCN{EQqPV--Jv(8MAPU46u&a^i-3C+(3F}R!l4^s|pZEe7mijA`Yd=sZ5 z@Z+QR3s$|hf|`XU3L7#q522y9_I6m&dVYSn)1fw{Z0Z1ze1(%ZEV!(fp8)$o3N#~$ zXv#9HrNyyYy!u+$#oYGDiu>63_|AMSD{wJ)V0&+DVr_$X90oCQc;-(mq9!f?K0%mo zzy|?i`nM@YT`>r_7y}^L4&bcnY$ox?PoJJ0AKMUcM-71|{UEx~1-lgNIasK-sc%Fk zbme}H$95Da0>o<&%)W-0=AR=KhmMcm4X|Ykz_QboX2v^>JNZXXI?DZd zfUaI@IxhMGwIK~x$3nL|jPQ}#LLedeH!2Xf`)h1)Bgw_391Jv=POa3drlw{rO$|)= zBja>FpFjY?$pv>14g{;3#d2Qp5uG~Lr)W32Mn*=q{+%cQ%YX!|62J|hR>Jj;TgTy0 zzF{m?NMQR)IuI8L1nhmltRdGgsJNDpU!5MDyI$X&4ukj*X9|x)+t*5cCY(FFDz^P; zj3p1$V7jxzmx%K6qd)UYsCGfD=I*HdGKBC}ZinPyXY8<5-zzd8}u14pfc^n!V$Lpiu3C?$xx44IMo(Rdx#9@${-$^#k8c1 zgVqbM4WHEn_c9(op@}5)V=1ltb~bAuu-rtcP)s1vmv3rL!#1*=nN&k<|N79s6VdLR3gySZc7ragfy1TJO zg<92b!@D`98)B&Uo3`dY)14J_jc$<2IGYLMY<p8&Ey9DSr* z^;mv`yEs{zGP>ER!-TEeabF`Ss4vyka`o<%7wA10^Oo4ArfvxU`~lPacg@tYkHp|N7m{R{xgV;gWyQ;V-VoS4@{uDq`99kpKcD4@$!OT`jL07d@T-Zla3&KMw8O%24RwV8cKI)X9IjSFe>^t zXFEAtwtKAsK&>}P>;fUTj~W_yz>NIN{|3z6(WIu8f`Y>L@tyUJIGxQgDaIVdydhv( zSfTfPkFXW!=-Y-RBDv|4$2xijn>0b)PeX@9u5fNk!60T0468zU=1KK^31 zU?7RzkDdYn@1APP*x2~^X18Rt+DZqAa34Sb7Ca~5cRi~aHl87fD95S=y~gglyaK%1 z3iIt-W`mwLKv)CV8VU${7+(=6k0_&vlY{iy3e<2l(Ayq8-Z{e9n1nfG@;ey%+vzEdzW`ek+4ahzC;YlJOU`>mR8Z)ye70=Z>!}S?C0MyFObbu>1;J$6A4iNHSIy-=J_y; zxwr_AR<9QsuV%_G0P#WNB;C;9t*4Amx0bH?XR+b92i9ghzCN3!AH=0f_H>cdEj1pNDNw3bfY7#s+A!C1<$Ujv%lRJ*JJ_ zZ)5kePoqDMUh`2;EK{SKHm@2ZQ4)P9rA!*^0tB;dzSa&lSF^x??014{<@313^g9tl zL@V3IM~hUJ?jnHrHors$vJal#T_A%g+bk)$_sQ zZC&wg3CN=}Tx<|jFVn9TJYv+01jUXzYNbFL=Ej-pjaCtLJXuy*9U?W~#&Dw?lVA56 z!_r9A<)61FmH!s3-G6=|(n)|T7V^s{nE>CxEzD-w151&s42=iAHYTmnc0QIp_8T6$ zv^f{%*5(@Z5=M6Z&qS4d7ps{DkbekrhMq1TyV}%yoN|D2P2G=0oDMXr5ard=<(gE} z3-gNIJIl>)Pv(B4At`u%hF*Kn2zqDV9d5Wi?>~|AoGZUme!E?>ajL0)y-yqWG5z;5 zc;boov@c7hq#zIi5Ts@W(#&#RnkSIE+jNTALmx`q1(Ypx92wYCWR<6?^4Yu)FNGa$ zV=Z(zhI4aw9rg(9`!-{Lq;+Ubcqm+JC=29$n5wPgi-Yc~HF;dqKtu8m$`bpiXEp>XHL+OHF>1#`v*WpO_2#zE0AaS%tJ# zRe|Y4dC(VtmVhco0(LJD9$g28ENF7SRsGt5$Jqa>kL9x^4AFmNsH_}G5uV0>hS!yJCpo-AX@^Ghr16ow zzXe&OrnDpIHsC}F)-|}`D}WUgpZ&LF8z`mwzgFjZ1GuOQ z59LdEGW5t~<-Sj$e;Lg7pS=Zr1BdUxIqQ$A9RG3hJlV}4hW_z6$PfOnx9$CpqxoOz zQvHu(@IM}@PA;daED(iLslWosu-_5;e(^-TF@8d5&jz#;lsLG-wbzbe|GZxY>6|D6 zM~RFiBjabjebcLzJg@ITqd(Ct*)mM=9Fty`Y$7-g{qxE1`Px}8f+>7TkF3D~_E#U2 zmD}3d%hW!7Pr-WZNbS%BSugz_cbBoFe0g8mMl`E%G@#~Y9zVy-Xc8t(4LsZAhpJ2GBV$%hjnz2gLX zGpjb-WouJmwaKM)I$n*JZ9MObiGEM|E&Iy)v+`a+BAK|j40--v zpIv85cH&*G12#_r!BpZ4RadTbeR5Kggqpr?ga}k`_T*P)wNoJ^!oq6dF~2~uUKZpE z4Szn>xNbZUHv^$phxuI)1O-1&(S+7Pb04RgWXPO~T|ih~rW}6B&ZqGu1oDO$eqGnX zmHCp-o}ZAOVwzsBrpmoL`#rRLD#N>$>hgYzG-z`-d5Qh4*=J1O(_Ci7`heYfG5wGb zUwY!>KHlKoKP_W>`Uix(QA3mZ=CR_x|D?G*z!LW8zDHO(ogUJnGTjpkTFb#P+1=BZ zFl)G_*$`WRnw0|ia)`WGauLxrP%>mdhsgfRF@7SI8`K%qvpK$)laQ zaW&^@<;46zqxRJG^Tl53O&n6Bx2!wo;cC%&aH4o;^}%IXb#JsG0dH}?S3QD!gjQ6T_pRUIMncKO zRerADCL4o$;8rS^S6*uj-{ov0g!xp5@3KPxBJY9AtM^<;gKn)Kj=uJPTWq&;H?H(J 
zc6VpNnerv7bL&`YlU7RM-4erm*Cd3Pns_5&_K!0izE=kBYb2_pb+}*C_AOD!xu-Sh z*O=O#XUzt#LMF_25~-d?l4Ge4sj!@_*)i5{*hC8=fh3Wj8fc(=9Hah12){{gm;K+& z@;gD<6SMr+ZS^*X1)Qwr*Kdsoo+B5T><|tM5-oWz%>O_eZfXe?dV5@=6P77-Ac-p>Mp3M~nvZvlthvj_XMGBs*5trN%zq&}9UU^1${$q;0Rg)y-cyrFvZ8rh42{fO`ek?75k| z8CBnAR_zHfw&QL7TW;6fC%TBX8e{Rj?+e4)vjgLa6K}33vYSL>Cw9|KJ5NwP(9X7} z4p6UVVjA|5MwL+DMH;!czSE}wgf3h^^o$X*Lj10J%e`Zn1?oU zWv>|b({T7@=`Yu9{7NQXQ!MH?yDTjHUY5p7g-g?z<0+P}rBE~Xjb}0!er$VD1v}uQ zprs^xA+)4)?Kx2$OXXwODUnJi3V%84LMP6;y40$5W^^R9CNf&sG+&LvkbAnIES|v{ zd%>?@LSWivXF^2m(Xe-=n_ab~(q8tpf291SR^`S(G86~f>-3$SaZ#>#iI%|cJLy8f z`5>8nI&kuOoY@9r>AoRt@_i9gK6AExka2h2J{Qaodg4+HF8MkMu&LfTA!Xo$O?^ToEu$%faDg=q-XUT-*_tVe2L$|(6fOYk~q+o5*KB|@dn zQ8r=^E1p^y3Wb|{V>C?3&yS((`yJrnY1Qss2V#80_4y9$hwH#yv5@s0Gzn`1jML`Z zWa24f;;CE30=t48waRQ!VKkL^Np0WNo-D7P*^TyuLBDfumrQ7i(=!x8=M^w4?4mZHekx?ku&^}mu$Wf8%`Y4Bj$aXqu5ax40wYBTXkI}`*i$)^>tAvu9Ipz| z4IX%*8Wp}+DTI+7`D$C|D$z|``T#xgrAl>^y=RFL8YMwE~)AwN# zq>I*jT$@+I`tG9;`jd?4G8;INaQih<4IAfv>}gxl>A!eF!>xh8#WjchJE77df_$Xk zCoSf#{mm;a$A>r#3St|{>lps6gE~Y{#V*;oTLeQC?jB`PKRS2E;freRrCDsGRnO7t zsf=T5&42QrwS>L7b@gLTDvjX_k{fF`Pd1x=f9^daW)3)V=%3c+ueO}YR1tEMc!R&$j9k&O-RE6#p#Q`C>Wi9!L5V0C1~@ zr?W*v*&H?TRN4*EmXV0gQawD0;fn0awPSw}(liNOE{ly)syU~Ck3vfK`MrKZTc=Qk zXK!(mLAcmsR79+LL8E&bOKQJQom8b+yPY4FU9u#+q9Zqqii(;iJ$JDuy&$dk@u@F7 z{NqnaRr?62s3ppHRObo*U~MkA6eZNa@s$otOP&>&Ry}m%b`YcHdiH?M4azRqa7yU3 zc!_+7?4UzclEN&GYp55+sJ&LQD19N2v`+(Gb_cpY-eNY5%MUTS#=FTH_LskBZ;HX^z?UYQD z%dOji%eKp6^O&Cyt^U4Muz3A|(ycovMc!N%pRr|OBvp(8?vJwJz35LW^ZarkA+1tE zUoq2{x=;@iJdc`HbrTRN&bRDurXg+x_`9>yw{^(x z5jb_|{LOY1f3nAcuUAPBrohXS8s%=g`<~uOULa;GIxW#XdAf|vn535K4|VYxO&`gr zup>O17zuY+%UlfMZbyF^L&G%EiHx`VbN>aE5}3@to$`r3m_7PRlq`d5q(HSu;8-#v z(>0?}@z!C}p6B?h!~WL4E8;ZeETV{ogd_tbTt&RBN05KX1Cm?nwOqyi!S9y&)xE@y z^{?|PhQ^0|x$?ME7yvKC6EUNIjGJ_(4JR)Va(> zQbzgYpDqA;>J?J(ee;%rxwnpWD>=1P~D=(z@M!cDPj{Yi` zG)P{BgpR@5K@_(H`Be{T5Bi7LSKrC9ob2Uru|(ksFgs#?2J|Dp>!beu!OJV?yBGPJ z=RZn)Ka9~yxc;hxl1HR};Cbe^eQ1Xwrl@>)%XjX0&VBv|Toa=7?75D}9({!=t1d$* zzTd+M&eqO%OmfkV82hFo_{yPbPBAU?gPK&o3~W+bpmn1~N~zQ5a}f7#a9&Wh9gjP_ zC>9NbjNWVi{e!}5*NPq>SrlmD0+&Ilds_x0GRJQrKl^LL_Va{d2kFpwE4+K?i{Q|( z?|X65l8p;qjGrD&jkS2zic5mFBHN5aw*R~Uu_Ijh`t9F8x4m=$Xd*=gnqM?7aA)JW z+P-2Ctn=B$iM}a5s`GiyE6Zqb!dYrOh;Kk#$>g1nrcsNy*sj^zFZgYvf*GQ04G*8P z7(+&&YTbj|u2i56D|B-xb`V@j(&@8ClP^GFZ}(IgO8Lu^t${f~n_}i+k(vKIjSRs% zy;Pi(rq*+eAu$oHczh0*fkqjDtcU5BQWitm#P{LIyITD;ibHgmzfzHO%HNovS-j`U zzohpQ^|+u_^fjcJzZ6+RSi{?DyL*s>-sQ_!kO~d4eKN-NPvc3D3Je_>0c4iYa6lQT z#=;C-x9?_f1?A5F$~=!LB+7Rn;bDpKc0-hzo0mV8DgJwjfY4|(a+fM=dfN;a$mhVk z)Pg(Vp9>Sr7)%*AnRG%?Wea1l+-vwodn+hnpQB4U$Jf6RnuYMyS#Zie<%1H|%b^?S zM%@7nfj35sNx1}?6l;-`dhBl3!tY&k-<9N9b$5err4Ro1jOcsC59KZWHP z&c+Pgq0cy=v0;G^f8LV3uD4!-a~dZ-!|C=e(9LTet{86Wf9s^@i#@i?5k<5udn(7- z{hhS5*)6Gen1Oul_p=djFU;l1bZO$<gmf+SD8)5j}Q7+d-PY{Qp5jF z(A(he7N@;Ce?or-gkLdFJ^LQeaKn*2m$quX!SRQ?_7`jF4HPHDtiLhaGy2;Sn$Vv? 
zMr*@c+=tj=Ci&V=ECN23Bue0w-TRG{S{xFq&!#@+Y0om(sJs; ztW#tq*{QeWC<-4=c!9D679UaI>R zI5{H`sHi< zzGdda2>OwCu18rXy?(pl^iZ1q`X2l_HOFL~A4qFrNYr>rH zNVL(&d$_Nq>m@?sz+MRvYxf-Y<6{eCVf1kQn5ebIdNe)3eM7Q(50~tLP5BZl^eP`M zel2WM-!{#;&t4bGpF)4GK>7}>SP}D{-6arQ*^;Ebco52l3;Buv%aU=$|IiVu>U3ZC z&~L+e*nVm%0kd$erTEoXZAb1G%K`hJEAM2vokyIy1{W+j85^${i_M~2&N&zDHJDc& z>){q3Sg^^%y7o;hik2H?p&f8?rNNy@lM$M~v%>m;{P;V_NpMy~z{`r5cFRNZz=1Fj z^;!4E)HnmRuM5?b8_V@o+0&4O`U;bl*Q80PtA}LzY8*Qf0`uz5F^e|W=f9l_H=?ah zenJ{~CfbpvOTV(B(QCYMvt@MWRjW1N?H*>?-U_n#5#^-!uvB5xX?gN=>E2?mhr1`Y z#3>3@?ZCpES1u+|IXJ59;`kb>Qo8hVnyN?bh`6bE(r;IPP@%i$Vm(SzE~Nk?5ks}p z_tL*}Ai46phljjwBY40&EI_*AuWv5;&-)T)>A*F>8bWEF&U8Zaitx| znyo#C^yVkBe(iX{M5uO+im1OjquQzK_>aLvhmzY(Ay=PZ(s@m z!)1@r`|6&Nv1~<6ubyls?G!Rbn|%$9RncQs)Hv7*(2|o}H1wQGRFD3NM`?iN@D>t0 zt31EgS9W|W83<1xturtX7@V;4^E1SsgpbV<=hlzEwQmfVNjiLd?ADJjbm3maw0&Z3 z__pE7@y)&L(S4DBxX?y1UlfMaL*+Vek`Ep0zKt(A8xr4z&@ExE7Hf5kT8$m?{zYHd zgxYvKu!*_r5?>_?O-6TU)q7>_R|Q?a9{YfMRzx+T3(k9*J#-sA_w;>Unp;} z`c0E1{L)*@Vx8!vZ`w`}Q3|+MK{@?ojHexh4v5iZr(CXl;USk)r(@y!IBq&rnV*l0 zvrD_6WQ}!y826@-shS?vKGzlW$_nJ$ZFpWxJHB^`OHhx-KWIppPH%%GfmE|Wlwv)L zT;;zw3Cz{U&}cDhEi83tpAQ%OV!$}%M- zLy-kwQ3c%LnKvvAZ$+qM$<98r9kySYX-?1zHeRXK=Rpv9J8F4k_J?(8tzOwIDE0kJ zOL4ePtJ4JC?Ca|YM;H;Q!KD}h(mJW5&6_X_&F`F~=uWdiEb|BF{V_0l%~bk^>oZ8y z{Rvy|LRhqMtvcreE6cr-J^gNR?hpmHis+!riNSRpK5}qT}JW;ct;1vbMa`+ZKmyGYA(gesHkWQ%%eS-G6F{#HRWP25Y(Y ziuT-hhgO>#e`Pn1WpunCLZQDWMu}0=Hyk$JorjOln6SA*Sv!wy@G@WPcMYfSedxkf zc}*sWU$XT5T%-UdHO;rXZXTGHW#(FTfNY9eg-Q!$zjfM#c%{&#jIuS&Fc!KIDCOe? zo?fBNr?P5EaJ~u(An%9{=6x{VEs&py>yN|`cqZG{qFzg*OhCmIl?3fljesBPp7|0N z>-6MP@m2;mbUkV1p__`T?H3-zIuhZ zV;Ld>_Rc!z;7=Lh_AsSz;?JQ%s1=?Q)y;j}D~wl!QPFqFTu~lj=Nv?rk#+;x)6VlV z1Ppc%{H3z?WLn#I9?ATUrY>>xVKHe^j`y`ETyAu&_qU{UsTt}S(>4s*c% z>J6{O(mxJ7TI6Y#P7|<(;3wOx?~mJEShz0~5&iKA+IvnK@qeCV&;NY)&y)X;KQweA z^8pczTdWh}W5-CbQ|40aW|wHh8AuY`JgE;c|7i_FV*IqLpl0s(b#x3OH2KB%k2@{^ zVlB3Cz{)ViO8(EEDp)E-{PSGAR{rBZPe?aJha(_u+HMrylSSb%PI#nGXBukJJ#N@r zt1Qgt7!K{hw`YGJLGGpa+!>%muBzklZ2U?4lXj*)UP93OU+>t26rTPznF)%CG=! zR(zY&oXT+|)N7NyUtYx3rb%VLV@4>LO5B~}$Z(90+augnujNT0yGJ7K4KSBT`$&In zbI@NnZ>)9o1l!xfTMtu!&iofaq*T0~?zE$xp1xTX=7R*In`6PItkl)2V9`)YX}^KZ&{29O zXw|{g)n)%GFZ%yg+gCtE`K|pD3L+&Sp~Of_r!<2ooeBcdjC6OmDBazoqzKX}-OWfy zGsMu{Fyx)_f4+17_Z~g>`|kZNYq1uKc?aIv@7~Yz{NmZ0$_QFn|HU$1dg#KH`ZhyI zeRv!32zvH<;Uu(nOasvZC9A%kGp2(WDbAyP-^7~pnF#P+-}IlOIlu? 
z4)M;cFH2$y>NcqP6U?R|o?qwhS&ZMyUf4%d$PfB!yW&y|%`ZJWCb z>#zB-*L+{C`t&tFX$=`QUvPamgf@u+#aUG(bFm1E#;A!oZkgl77L?MHF}}~&&{+ZT zm5|EBMZ%S9#z+p=;d0$Oo?>31jAthcox-k%6I_l{)~I*v2csW_S7$ClCknYuP62R} z^LAFC73s+wc=fYQrtao%P21ADd1IP z;OxzU!Q;G}ur;yyvkIEk_u*O@X|=Ai&uzW{+G38CHQhV>-Y=aNl_?M%1Qw!O@kFOUS`yq<8{-RiMMs901Dnmym8X+ zCdwu!lE+WBUUgP{Jj+*FX%81^c~W_>Ll?0r8@6tMb6=nmPHUp*85B|HQUGH?4E&+z z{!P8DezZ^DZ6ez|2GGrs^cx^~2uAB^r+RcD@m;RSRku|a|G1-S0TlyJ&S+m0_V&S8 zT$w0)x4i?o>C4U4p=D@2CJzYe;?S(sb-U%yzV#q>7ts47h}7ZguxpGPm9+tW_u)DF z$W+1WkC6dEqgH!`;kz?wuOayaQ=cpjg3Lb*^BsuW41v4PZyGoF9fX9J;>c!l`4|j# zl#PF$oyeG})_+l?FSedYfA7rXMf#je#lox2OxTQ-(s7Nu*p}Y|$2;;{L?CCDJSzj= zE`(vELR-T_NmCIzhp6zgSdF5b0zTa^+_!WJx=(rAmxl4P}Z z-m5*O?A2b*(PlRB8I|Dy;k>`Pv_`oI>=>;TiCeJcKv3!W9?X5C!vjNfG9Fh(DJFYs zW%jzeKkI}(3)-vcxcR;v7|>*yG)2A9y*iufPEK*y&l-p{kRl*8tZ@Emak_HJG?~t5JK7IR`HWeh z6&P$Uy&@}C!PjJb*2I_CtXFkunl(&Gsa2N5RCbAglJ2kxOaGnrt%Iclw}VWN()`NP z+-3dg8+3y>a9IqSwDl8khBkLkA%P?=dP`KdC6QJkwFYl!uE#6Bep8VFNBL7ypG?#f z4#ZSdIk>i+*gbCaTqy8$J>7O}HPk_?u|j=Ial433(s+6|OwaMb@VUB_VscCH%d^Au z=ErJ<-Erq;B29#Bzrf1Lb`hYYWO*k4{^XTAb8!r%*|7^l45fB-)gj2`(x0$`0|{^h z8zNrPZ$0df!oL^FN^f)Hp_joM)#Qr1P^MP}f=qZFp7sx19zG$xEv7^tEoFkYu{F@#&XXqV3x`8dJf7E#-@OE z7xcOsWzVIymhx7$y2qR(ld5kS>zUX|y|?m*Z^v}s+o(5kCrmcga2e=u zWM$KUe3=~?#QK+Xp)HrB$jrB(1SE1ewAMoK=cFQ1b)ddr)_$Ix5 zdHjoFJkcrX+*c!km-n3An0lD#+4-bLNqHQX-kAt76HS2b;YW~nHG!1(-FG{Eo|Qxx zlPA*;?NQ>HhRk=IE*?>tOjW#0uG<(`iAiWojIZ=upJj;j#!Yqf+A*}kuZcXLCspA=$r1OykCX`?fB*%n8h$#OTTU zWbBRtDZ9oZOVSSZFf#DS+_k6nG0ov!0ls`!ufb(0r@KoIr*phmbsQsqQOlB60`(3} znxO2iM%Yr}Y*MqgcS=s4E|4YIQ=X?MaxT-*&7i^~cL1ZApQ#(Ts3oirY-VEGcO!eO$` z23Vo7&BP^d6nEJvSIrQat2SQWP*;JYae+8wwqDikdd0HV_M8>kI4^&CYFyOgbZZsZ zS7VN9o2&R_qYD4aGUtAt(q~A{$};BI4Fw#t@^;O@xSY-%RWV4JrcRljqB>~%ICmiR zd)fYJjvc5zUP~~zs{&M-gn=7@8zh1)UVur3(4)<%2<}!sqM+i6rKrXMib45)!d>YnW}c zG)inUS34LeT2)PWDB$_1iLLr|F%eTl@)--MA=)|by${jK!}SULdDqm6q1kZ7xd?#^ zdO97olZ4MV3!O==OqRdQC@NTq&hDmaN}|)D61Z%E;vg+RCy_!H8x4)>gF;4#a7H zhZArRVsD&XNJIDS+}n+;K>Mn+7SB;nqBHEMkt1tl@TMUJEB!>?fJDx$&Wq+TcH_k& zN20Sx@*doimjbajOe|4kuU<7kzYr=%5rjB}l!WvrwO1S4?!P$H!nUkoC%w$b@#TGtZ`k}!E-W7Q8{T#a@Tr3 zJDF3-C-a9@688?c8Q)FOlhd?)VBaqRm9YpxZ2L6ub`l?DepQnWSQ*XQ|A%Vn&JU<~ zoH&m=oeO1{cZ9iaGoG4=wwv$f8{j|>%Rq^i{6+ZVj&Bnk4>lo-mt*b?0+w9DhfmX~ zQ@JBZW^NR1+V4D;lwO)Swaw%xyG0HMAsGUFNJtS2;ZZ47G?s9Gx3!fL1xpzPeC-6} z5lGE+VAcuxQo%0I^5%Oswu6=5StU&;`&Ef0sLfB%epcMc;!CKl)^)r;p1ZI6yz`!4 zTjtFQFhHvCD_b;AAv1O8fp3w^qyQ_5FP!5&5jmgie5&pU2hj|IO)DI*ulmO zi8hNTh^#)YNMZ@k*<86V>?mZ>LlL^d_Vp3Is6_fFm z(0&jsHrH$KG#Y$@D?hQF?Gq>U@jzbN7JlH-;MPbo32dN3?BGgTa%=Q#u^U!}_3dXg z%i~TJ(WhxEJXEW7^p|C57Wls@R2Bp*Z>FL^qv~bNEp6cX6z4RD0?m5$5RUMu4E>U| zXuv+D!O{3jiHV5e_-te_ezb8_@PeBYm%OlJPTZ%tq!s~d`iXmSVWy*p%WNt`uSC*z zvWP6gUKX&(HrEqj4Ija4Z7@?Q_1klt^dC8t9$bD?k69*8o-Uia6Wd`e759Q#)ukD|n5?DW0ZcgL^To}wO@T^2LM zS3lKK8QJ~pm?m)g%xvhODyr;yAT9Hwe>>9DfFhGUcyhf|d$eVqM{iK5thEuV(11GgVu?1 zeP%Fu>uuAx$Hd@Npnc^(BmFk%kg{yY!_m=;-BQKnjt=f71g0Wo*bgmx@F-lahSYu{B9|tRxJ&AfBEYu-|LG*CBZRKenG(7h* zzD{H+8xe`d(7~I->kpcO z+dBNeF{(^)CJ7@X&2HA6_VEDK#bZ#xCFG zr38j70~72fsme!_xGgukM(@9`sOGSCn`3IYIea$D??ti)-J|!sm ziNPsfiPNYcw(J6gZ@q?UB5~Ih!}@0zrru4GLs4uuQ>Dg<~F46eq71GE<~t5jj53K-OrkFTVGd<8i-BK3(I-vF>mF&f}pchyuiqw?NM4IZY4@V;4N>A*R zV@iznecCNB=u+#uT;)Mrj7aF^+O=P2otVDe52GInR3Wu?7i!U4c8s5ta(2W=)Ob0dXX^FykoLqD7rrW4e^HSlzM<>kIflg z)2v-|Tk7U4*p@orH~G^Wh6oSKgxRBUp*`U=>|G{XUw>}*pQ)B|B&^JT4=d?PJE<)% z1LO9O$E=`lnF+~j?B&1qGJKCxgkB`u`=nTxJWJsTZ{{i6{kLi5C#;OO#^JtA*Qx^j z3ny*|7egZ5C_opV#>|&KPkc$qt7}8P66xwZ%qVPJ;m}#DYB^m*T;3g z5=DhSq@-P$ZnyU7$;(ZPI@u6)H(?A4Hl1=+gWX59C< zSZepfXieAJ(a=r^)-$g5O~);7Wv{N4}NOeh8 
zz#*!*-2)Oy@Dc;5;|Wg;nx2M=(%W2?^~n$HGu*HI>vOIQkj0Gf;gb? zqJP3}eqXb}vpH#dr~hrt#!c@{fH&R8x;=W%(-@;0ip`c%m*{lfKzGm7_;TOZytZ@E zUvW81`(~T+yN=co%U0|}z5&&{30Y(l2O!m@sm#ZT`mYiB7a>zT5usTcD<7PkkQvj zHR&)dS5*|Tu#icuITw4gnPo_b-+r&wfA*Ew2igsA`C%~X-OjBD=7VFh7@4H=)LJh= zEA3MZ4!V++Pk;fTxnz+>^n$a9wEzWdzrRDM{P+bEoVk33?9OW6xEW4=so|LO>g%*M zaGLToR=r(ZkL8A-!&BYVZo<_;8f!^z9z9L*Jkmc=%YOn_*zY&z1{w`9ew6ue?(WEI zIxh>^*eRRtW0UVMe(L>{B0^Xn+1b=pS*#z#jF3{roS6Y8%LzijwIN889Uu!a#K8z1 z<^}iHANKopsXN{Y!`_}en{Zwl@;7WZoDrPU48Wd2_z~HBGyk=elBu*8p<(@4*PWv4 zf})EiDlBUY(yD@vs-0BTQudE~ax79x_9~2C_$5HzR8Rn5x%smzSC*v?$XCjkkPxsy za{Gdcax4HS%Mok8BGWZXrCJG$-MTzsTrKRb%h$m7IHiPKPHBv8e1EE7H49eEF|8>+ zPA^Yw$k^jt9V&L(8c(3Y=b-6F3;#OHY4-HM(}d@{GDqO;k);lonvamo2Z=N*j~nsW zK)l5ej?Evw28k7~1Z4}wQiVI%aw704(Y=U#?=ZCSJgZJ}JHmft zjTTqGL2Y~jUH&|~zQLk?;-vO2dU-f>hgxilJh6C6`kEo)J`DDI{Di_PriTDn6KFF* z6R=L$!P1GgO?8Wd|O;q=Fc<--&nAsPnT3RMU}soi%sv75~+vJ!cn^N zE3c5tY56_V5t}yR=v8FjoyqP@@A>sj`7=I?A4`x%^9oZeay%`vp+23t0)F)R>$K+U z9dDO-4`(RdMO1sO@C7-QuM?J-5qLqP&cExYU)lxYG&&WPbKsszt#S9S+*ELW-9%`( z*s&729C%P+uK5O({1+m-KeqG=(bKlKEAisk3s^kkZ_+)%$uy@CNP);`Sd-Ne=9xvd z8MWzrH-x5pz~+yOPD?1Ib44D;%w^uThAp*v64H`wH*Z!Id=fkB(KtfnjENZ+w8bKB z+Cy*b;<d0ugE0w2sPc_uvB|1@4yvk=tD8Vmpv#ktRDH>=oE`CbEE?}D8B2b`HNCJXuQaE{Hhxx~qMN)M;9=`K26G#yB7l!E5 zBoRH(3)`e->;XC*k5&6ulv$xybR0*wFS2rJFSoCW4wRb)aXOBshyhqU*_h1Fe$**a zTt4PqJD={J)XR$+X<2kKK;c{9#4S?2>xU19Jjv;}N*e{eyYx!OS)8j+U1lnj#julz z<-8D-nzLsED_(v*)_lD6!1L^7cQw}7jR|oOGk^ZhPl9L?vt7iZa9?3WjuR>iVjB1~ zgAv;tqWd*nV=UMS3~`|QqTZFn7efEQTN}~buuoEQb7Y^HDj4K@y`ro`Vu8ZGK5W=2Q626Qo@`7Fy^VtB*d4$aU6arGIneW1YC zbZpR>3-TzRm;Iek1ZXpTBIR_wUF4w6QY2|IROZO2C}@Miy&Uk%wU~l9SKAchmz~iT zXHzU@4eTD?o#~k|k$=3FTFMP|Gx>jmudk{{Mr7MRkeff;Z6O%`aLQ&;b{nMNC3!H) zy^uP@Pe8g$Q+x5uNh#PwV;WBc#e{V(e_*AT3isYK)X|Uq#tSt?qb3d8??vv@Y(5}= zr0^;lJ1P6Ar*P0pNkdEEG`gG1P+9|LG4W|rwlP_=N*80xUkmT*I;+MigH`V0lzboC zL@s^IYBqE{51>Q9E3@9_>^Z**p7&^VzUwObgJ|MCCfRu})&2LI`$5ejos(2EGN~02 zCHi&X-X`CJq))yvyq@xlSWbVlMPBJH3tKwEuH)Pc7L=rvZr{UDh62Y1y^}rnl-6n$ z3+#vL7{%=?%{B~Hh}zE~)vyQ9z{@<`&s(9kZr4H2U7ER)E^qrLKX0Vpvv^)28@F*A z1aaH0AVQ4<7_7w}ZYrck${Tgf4ln2UwuS1}b@GUua;fNhq)x`PyD`Ik+KMP#iqjz5 z#GY~kXj98RWwGBv49ycx*ll-NVG5*(ZTPWPhkP5v3&|KYx7N6=TX)-TkTCrU$DTHD zKWajnXjNEd`j9l`0%S7GLzBWPtM`0#5n-WG#R}S?7WQVU$g{~=Ry1r<#CBt`Mw2b^ zeoi0c&+mNSn}LlHuy4$6+NGb!lWq5^ZWUUsV|(WgI(7tLI*BczC!R7&Hl<(Na-}T0c;A+rQCRW^XRe~&$oR> zIedACN4o2JM_7b!DZ|kS)6J#qykL+x+DT;((mv9e$Kg+mM(^#`BAv?l3SZ*4O^qn;oXEE5Qs*SG8=5 z6E{LYgxHRplg+7;j%i(CFM(!6@U~OAT#kV;<#IGj=GO$A9`t?K$y9z7lV!T;{MeK_KoGhE8uS+J@Ce~#gdMrJHdN{{SX!sORM`_WonKIqnXrErQgPe5 z^9+Yo8Br8B4IKgYerlH@lNNI_!mhfe7M>o$828iCH%I>nqn3|iRpxX}BPGgZCL_>FsjBa#hxsMbUv6B0f_Oj#9DwZF?n$uCsS<^3n9jeewG%8Vtr)A<9)* zTzIqsYI8r_ih70kVWRdb4UdH^FBaX{?F3 z4YI%x^VDQKQUd1^SftX4;Qlpx8B zf87$8nqu_1S*356l)2}2h<0lohp73v&#s=kNput#r!~)^JTDxdKwUVxJn--)3tZJ% z9zht75K}?UacqZyp_ZK9VV-{arI>M`o+bPoxgLrdtj>UkwY+3KJKAnTMI`{y*x_hzPihocwvGP@T03H zj`u4gTUG$q(}f?aQOVDCDEU@Nnkomw8xk{VgGO@QSwT?N5p_BqSOT$;N9&S(`dPK`dG6&iB zDKYW$r=S{-=4$w}GnHw}#eXj0kKW6+UJ95^7mGT$_8hfcc5>-pl ziusY}@;XgUXQo*2?v~0qd-O#7qZpm}5-~1)(IL@Bj~^9#Zcs>5s?vlc@u}$>2I>P; zn*)yJVR6y*xMP|}8j(f=J6y&B3pg*b6!}DNpUA!p+V3EBUKcARdWN>y(x0Sy`Qw|m z#)X1z&{_D5KmlcSSkjG9D(L0y(B@%y_}gQ?2hR;T4gogjC^bJ{vJ-Ssh|uAdV=cW7^In|PJv;Pp@l ze1H@Lu+o({m}z)IYsu2S6{=To%qzDxy-D=lVsA>d`K@$>?Hd9WYDta9S(>h*?z{5> zW-8OUanwJ8C#fuTBMzwVkT=Y7tL@?U@gUWfn`_S`%F1Qc<%;i7M+EM;ZugNCe^ zCMP}#c25AiPI+uSq<$|V?Ju>j@!VswciD1Eo&*An?;WHWe{ePUvCcm_Yf51pj|_ov2F`jcY+ zrsQrzU>fO>-xTO_z^r#*fi8R6&_LY62NcURJu4#LdM;LQ6!OD9Jt1Y!M-2=qRJg_* zhqCu%Kgq%?i)sBhBQ(%5P5!@2O^~K=Qvi765Ia{Ej5VveqnepZ}+h^(RTx-HszIxgdNC~>F1S`6WWKIk5U@n 
zDTmCsSUEJm?H%tZtL*bg;&)A4l&kx#UL@tk2y6abK^$^!0FVXkdSZop)1`<732_!t z(lJJWpu0yDfbImG)DvFaY^6-S8qme%b;~rwmx`i~Wn*p1$8zy4{69|)0cPV{`Hjcvdz6JDnw6Mnh`wGpR;yXr}s!Jg3c)*vSqx;$MV?K9%zTb=pM-1nbqhqPbK$!d-WubU$CA~V2+y| zImHm>=DWWtj^KE5;(z0S{@q1a3<9;x2gb2+TIzm)MEen%4Qmqii(ttx;Je{wz+#u& zN(Q0~2@Z}YC8j%MIVuBnp?r7U>dVNE>)lUs1tR~uN+$a?_NIf>e-~F+xsLHU!j{qe zjk1;mrs1oW&w~Ii%V}Y&fvP_lE=sTTSjEm8eYSZ{_x?!q>G}=-qQ2xmA}8#BOa3b! zdv}MErmjif_g#}mx~IpDSDG< z4EyX_9{q-$TMtGyz`+%VH*|mJTj;F7{jc$Buh4#*c>vfg|AoXOrHLVjL>uhR-;E3Z z;V`&X`X#j}|4nKM{7Y*2`kW0fx0vpFSBXvnvIhH3Y3AaE8#m(Z|FPyztkth%;8R!| z0WaxHl2*Fixsg(Aj_2;8iYtmJ@vNQU-Mmc%c7miRX#rWjI%$JvTZ=o z)W>4>UuBh~MO4IrnLp{2H5Ix2Xk$MNL%230E#X$v8g2)RM~^R`h94Nh*^_W*xSXZez zN3V%0WnU3~H*>POdag5`vwg4OEvMlI(3oBM)a>~etYmf_cJyXr;-QhZVzG&+6HZMb zwRWVfKTAG4zTmANad~w}K>Ro9BE&==%iL61+U`-pa;0qNKBihbdfhR!Df2M&)pxHQwn0#twjAQj(41 z#m04U+MdDjW01G?21jc2+{D!9=z=Jwjh3AjnU&zHvcE`KG0h=?B^C-Z}x1 zk-9$qeT3tIQ~n=>4yHX` z>+CS>Lyw^y8&8pt5k5Ck@afl^yEdA+i88JnNfa%~2@Gus8zNN8P~55cy5`WoVEo%)ws;}K9d9@Xm8T<5 zWE$h4l#09#ijz@Sn-9u4AB5)M^G73sUjU(OXMUMa;nQm;x+d$}RruO!oJ(8Ny=*3A z@LiGQp(Bp@Z!xiJSa6|{{h^xA=xepxF`VMTKXULOdTat&9A>-?bKe5gbs7JgYJ9x+ ziPGkWy~UqFQao1s0ayIT>{3nVKz43qEr-_-&Q_M}rpq!f(X^K~3e! zOfQi{@D&>VUHIKs(aQb^uJX>mDhBlL&L`m~9(Zunc~m}IQ3Z>aM(#Dzb*^O^TlK4yK=dpyG`MHn}>u@if8uY=)5|GMwj?(?=G_?{1`szJv z9I}b9^)E7$;J)-;NV%Ja0lgxMc&Xg>C+nHyyiHSN9X0>>KjdSn1Hzzo*#j`p3);@= z>2i;U9}bxe_*aj!*ioM8@^9Jh`-PQVs9bgelw^Lm0t)E;-#JB*0%A|@%cOLc--^aE z{{_)_i-0F_S9SH!C<^04s^3Q23irM?iLv|g>_3aiUs|#pRpkHbQVP2kGa_7|h(xZt zKdZrRCT)MJ8mF++W;xW$pV_m1D~GNOW*8?^m2qpvHu&YWGsev36XMOhbh%J5QJf3L zx4SbLVLOTjl}md*VhMQF3tCXjcmbB}@8!_x{}+dmx4#L}o-47e0^*IZ%{%*|hxTtH zTvy8XT7!ZAAR@Ii|5n6dCqGB(oGX3!nEM@vM5KbrI`{r^+RAj=|Ba=2_L4@-@Gqio zB_j|yB3F1{p2WyUCG=v#JmOtRkZb#p<;Tk%2#gF}q#;gCXYVF@CyX1d1;oPqe{w<^8|EYiD=$NQd^Om2=&5@j{0waSe zRqpnR^wH&jg@ffy2;+@2OBz67X(z*{RNT(?c3O0CQnKL7d-3XTVAtEZoJd)H29~@UJoJ!iu zH_&J9Z+12WYw1U^kY7%fT2X2OOC$3toKAcmck;7~_}Bc-#fl~dJ}W!gI-xj3Z$h_% zdIYb3=c3P>wL0YCD_{A7yIkQYkKz6->kA0Q1aWpP1s}U+-X8i=lhul7v%u0Hi0an5~3ps_LFT&&u)RjMts?}?jZ(|l6QD`B; z8UfrHQC$7<=&;tih0O1|XJ6U#FUg9C>fIC(*UA(?3^IDxN{25BR9-Ht>(BC{G_JPs=n^>??Q4Eoiav+{z$cH9f)wY^Z%w z=3}(wLOLj1!^kk!fl~q4;Ei%XJUrc>0xvOiRv=l8_Y2Fd+co-XBWGyW1_$>^y8)GRFFVd#aM=?B1u48)?Mf%X#C` m{|ngsU$7I%p+VI5a9_~*(5v`HXcG$JOIA`zqWFcu`~LxcVn@vY From 5ade4103553aa20aa1483fc68cc74d51f03afa74 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 13:47:42 -0400 Subject: [PATCH 0970/1112] Bump for PG 16 so. 
--- .env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env b/.env index ae266af80da..d5cffcec0aa 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ APP_IMAGE=gdcc/dataverse:unstable -POSTGRES_VERSION=13 +POSTGRES_VERSION=16 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 SKIP_DEPLOY=0 \ No newline at end of file From 0400037b3c83b64a466b68e64b92caefa593ea74 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 14:08:47 -0400 Subject: [PATCH 0971/1112] Formating --- doc/release-notes/6.2-release-notes.md | 187 ++++++++++++++++--------- 1 file changed, 117 insertions(+), 70 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 48903fb8b34..57f188bdd20 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -7,10 +7,6 @@ Thank you to all of the community members who contributed code, suggestions, bug ## Release highlights -### New API Endpoint for Clearing an Individual Dataset From Solr - -A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. - ### Return to Author Now Requires a Reason The Popup for returning to author now requires a reason that will be sent by email to the author. @@ -31,38 +27,119 @@ and will be required in a future version. New microprofile settings (where * indicates a provider id indicating which provider the setting is for): -dataverse.pid.providers -dataverse.pid.default-provider -dataverse.pid.*.type -dataverse.pid.*.label -dataverse.pid.*.authority -dataverse.pid.*.shoulder -dataverse.pid.*.identifier-generation-style -dataverse.pid.*.datafile-pid-format -dataverse.pid.*.managed-list -dataverse.pid.*.excluded-list -dataverse.pid.*.datacite.mds-api-url -dataverse.pid.*.datacite.rest-api-url -dataverse.pid.*.datacite.username -dataverse.pid.*.datacite.password -dataverse.pid.*.ezid.api-url -dataverse.pid.*.ezid.username -dataverse.pid.*.ezid.password -dataverse.pid.*.permalink.base-url -dataverse.pid.*.permalink.separator -dataverse.pid.*.handlenet.index -dataverse.pid.*.handlenet.independent-service -dataverse.pid.*.handlenet.auth-handle -dataverse.pid.*.handlenet.key.path -dataverse.pid.*.handlenet.key.passphrase -dataverse.spi.pidproviders.directory +- `dataverse.pid.providers` +- `dataverse.pid.default-provider` +- `dataverse.pid.*.type` +- `dataverse.pid.*.label` +- `dataverse.pid.*.authority` +- `dataverse.pid.*.shoulder` +- `dataverse.pid.*.identifier-generation-style` +- `dataverse.pid.*.datafile-pid-format` +- `dataverse.pid.*.managed-list` +- `dataverse.pid.*.excluded-list` +- `dataverse.pid.*.datacite.mds-api-url` +- `dataverse.pid.*.datacite.rest-api-url` +- `dataverse.pid.*.datacite.username` +- `dataverse.pid.*.datacite.password` +- `dataverse.pid.*.ezid.api-url` +- `dataverse.pid.*.ezid.username` +- `dataverse.pid.*.ezid.password` +- `dataverse.pid.*.permalink.base-url` +- `dataverse.pid.*.permalink.separator` +- `dataverse.pid.*.handlenet.index` +- `dataverse.pid.*.handlenet.independent-service` +- `dataverse.pid.*.handlenet.auth-handle` +- `dataverse.pid.*.handlenet.key.path` +- `dataverse.pid.*.handlenet.key.passphrase` +- `dataverse.spi.pidproviders.directory` -### Geospatial Metadata Block Fields for North and South Renamed +### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) -The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. 
After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. +The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. +Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. +Superuser accounts are exempt from rate limiting. +Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. +Two database settings configure the rate limiting. +Note: If either of these settings exist in the database rate limiting will be enabled. +If neither setting exists rate limiting is disabled. +`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. +In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. +Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." -TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. +`'curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` + +`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). +This allows for more control over the rate limit of individual API command calls. +In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
+ +`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` + + +``` +curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{ + "tier": 0, + "limitPerHour": 10, + "actions": [ + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand" + ] +}, +{ + "tier": 0, + "limitPerHour": 1, + "actions": [ + "CreateGuestbookResponseCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] +}, +{ + "tier": 1, + "limitPerHour": 30, + "actions": [ + "CreateGuestbookResponseCommand", + "GetLatestPublishedDatasetVersionCommand", + "GetPrivateUrlCommand", + "GetDatasetCommand", + "GetLatestAccessibleDatasetVersionCommand", + "UpdateDatasetVersionCommand", + "DestroyDatasetCommand", + "DeleteDataFileCommand", + "FinalizeDatasetPublicationCommand", + "PublishDatasetCommand" + ] +}]' +``` + +### Binder Redirect + +If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect + +For more information, see [#10360](https://github.com/IQSS/dataverse/issues/10360). + +### Optional Croissant Exporter Support + +When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. + + + +Hazelcast is configured in Payara and should not need any changes for this feature + +### Search by License + +A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. + +Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. See PR #10204 + +### New API Endpoint for Clearing an Individual Dataset From Solr + +A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. ### Add .QPJ and .QMD Extensions to Shapefile Handling @@ -78,11 +155,12 @@ This behavior is controlled by the new setting `:StoreIngestedTabularFilesWithVa An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. 
[TODO] -### Search by License +### Geospatial Metadata Block Fields for North and South Renamed -A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. +The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. -Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. See PR #10204 + +TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. ### OAI-PMH Error Handling Has Been Improved @@ -91,28 +169,6 @@ OAI-PMH error handling has been improved to display a machine-readable error in - /oai?foo=bar will show "No argument 'verb' found" - /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" -### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) - -The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. -Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. -Superuser accounts are exempt from rate limiting. -Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. -Two database settings configure the rate limiting. -Note: If either of these settings exist in the database rate limiting will be enabled. -If neither setting exists rate limiting is disabled. - -`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. -In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. -Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." -`curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` - -`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). -This allows for more control over the rate limit of individual API command calls. -In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
-`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` - -Hazelcast is configured in Payara and should not need any changes for this feature - ### Container Guide, Documentation for Faster Redeploy In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. @@ -139,11 +195,11 @@ Listing collection/dataverse role assignments via API still requires ManageDatav This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running -SELECT uri, count(*) FROM externalvocabularyvaluet group by uri; +`SELECT uri, count(*) FROM externalvocabularyvaluet group by uri;` and -SELECT spec, count(*) FROM oaiset group by spec; +`SELECT spec, count(*) FROM oaiset group by spec;` and then removing any duplicate rows (where count>1). @@ -156,9 +212,7 @@ The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been exten - `allowHarvestingMissingCVV`: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. Note: This setting is only available to the API and not currently accessible/settable via the UI -### New QA Guide -A new QA Guide is intended mostly for the core development team but may be of interest to contributors. ### New Accounts Metrics API @@ -230,13 +284,6 @@ In version 6.1, the publication status facet location was unintentionally moved The permissions required to assign a role have been fixed. It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. -### Binder Redirect - -If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect - -For more information, see #10360. - - -### Optional Croissant Exporter Support +### New QA Guide -When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. +A new QA Guide is intended mostly for the core development team but may be of interest to contributors. 
From d76a59072ca5d4390fdd920aeea2f9f6cee313a6 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 14:56:21 -0400 Subject: [PATCH 0972/1112] Update --- doc/release-notes/6.2-release-notes.md | 202 ++++++++++++------------- 1 file changed, 101 insertions(+), 101 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 57f188bdd20..f3f892ce4e2 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,7 +5,7 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. -## Release highlights +## 💡Release highlights ### Return to Author Now Requires a Reason @@ -27,31 +27,31 @@ and will be required in a future version. New microprofile settings (where * indicates a provider id indicating which provider the setting is for): -- `dataverse.pid.providers` -- `dataverse.pid.default-provider` -- `dataverse.pid.*.type` -- `dataverse.pid.*.label` -- `dataverse.pid.*.authority` -- `dataverse.pid.*.shoulder` -- `dataverse.pid.*.identifier-generation-style` -- `dataverse.pid.*.datafile-pid-format` -- `dataverse.pid.*.managed-list` -- `dataverse.pid.*.excluded-list` -- `dataverse.pid.*.datacite.mds-api-url` -- `dataverse.pid.*.datacite.rest-api-url` -- `dataverse.pid.*.datacite.username` -- `dataverse.pid.*.datacite.password` -- `dataverse.pid.*.ezid.api-url` -- `dataverse.pid.*.ezid.username` -- `dataverse.pid.*.ezid.password` -- `dataverse.pid.*.permalink.base-url` -- `dataverse.pid.*.permalink.separator` -- `dataverse.pid.*.handlenet.index` -- `dataverse.pid.*.handlenet.independent-service` -- `dataverse.pid.*.handlenet.auth-handle` -- `dataverse.pid.*.handlenet.key.path` -- `dataverse.pid.*.handlenet.key.passphrase` -- `dataverse.spi.pidproviders.directory` +> - dataverse.pid.providers +> - dataverse.pid.default-provider +> - dataverse.pid.*.type +> - dataverse.pid.*.label +> - dataverse.pid.*.authority +> - dataverse.pid.*.shoulder +> - dataverse.pid.*.identifier-generation-style +> - dataverse.pid.*.datafile-pid-format +> - dataverse.pid.*.managed-list +> - dataverse.pid.*.excluded-list +> - dataverse.pid.*.datacite.mds-api-url +> - dataverse.pid.*.datacite.rest-api-url +> - dataverse.pid.*.datacite.username +> - dataverse.pid.*.datacite.password +> - dataverse.pid.*.ezid.api-url +> - dataverse.pid.*.ezid.username +> - dataverse.pid.*.ezid.password +> - dataverse.pid.*.permalink.base-url +> - dataverse.pid.*.permalink.separator +> - dataverse.pid.*.handlenet.index +> - dataverse.pid.*.handlenet.independent-service +> - dataverse.pid.*.handlenet.auth-handle +> - dataverse.pid.*.handlenet.key.path +> - dataverse.pid.*.handlenet.key.passphrase +> - dataverse.spi.pidproviders.directory ### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) @@ -67,13 +67,6 @@ If neither setting exists rate limiting is disabled. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." 
-`'curl http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers -X PUT -d '10000,20000'` - -`:RateLimitingCapacityByTierAndAction` is a JSON object specifying the rate by tier and a list of actions (commands). -This allows for more control over the rate limit of individual API command calls. -In the following example, calls made by a guest user (tier 0) for API `GetLatestPublishedDatasetVersionCommand` is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. - -`curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]'` ``` @@ -117,33 +110,29 @@ curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndActi }]' ``` +Hazelcast is configured in Payara and should not need any changes for this feature + ### Binder Redirect If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect For more information, see [#10360](https://github.com/IQSS/dataverse/issues/10360). -### Optional Croissant Exporter Support - -When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the `` of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. +### Optional Croissant 🥐 Exporter Support - - -Hazelcast is configured in Payara and should not need any changes for this feature +When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the **<head>** of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. ### Search by License A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. -Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. See PR #10204 - -### New API Endpoint for Clearing an Individual Dataset From Solr +Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. -A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. 
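
Spelling out the `fq` example above as a complete Search API call (the server URL is a placeholder, following the convention used elsewhere in these notes):

```
export SERVER_URL=https://demo.dataverse.org

# return only results whose license facet is CC0 1.0
curl "$SERVER_URL/api/search?q=*&fq=license%3A%22CC0+1.0%22"
```
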
+For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). ### Add .QPJ and .QMD Extensions to Shapefile Handling -- Support for `.qpj` and `.qmd` files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. +- Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. ### Ingested Tabular Data Files Can Be Stored Without the Variable Name Header @@ -151,35 +140,59 @@ Tabular Data Ingest can now save the generated archival files with the list of v Access API will be able to take advantage of Direct Download for tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. -This behavior is controlled by the new setting `:StoreIngestedTabularFilesWithVarHeaders`. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. +This behavior is controlled by the new setting **:StoreIngestedTabularFilesWithVarHeaders**. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. [TODO] +### Uningest/Reingest Options Available in the File Page Edit Menu + +New Uningest/Reingest options are available in the File Page Edit menu. Ingest errors can be cleared by users who can published the associated dataset and by superusers, allowing for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). + +The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. + +## 🪲 Bugs + +### Publication Status Facet Restored + +In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. + +### Permissions Required To Assign a Role Have Been Fixed + +The permissions required to assign a role have been fixed. It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. + ### Geospatial Metadata Block Fields for North and South Renamed The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. 
- TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. ### OAI-PMH Error Handling Has Been Improved OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. -- /oai?foo=bar will show "No argument 'verb' found" -- /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" +> - /oai?foo=bar will show "No argument 'verb' found" +> - /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" -### Container Guide, Documentation for Faster Redeploy +## 💾 Persistence -In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. +### Missing Database Constraints -Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. +This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running -Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running `pip install -r requirements.txt`. +``` +SELECT uri, count(*) FROM externalvocabularyvaluet group by uri; +``` +and +``` +SELECT spec, count(*) FROM oaiset group by spec; +``` +and then removing any duplicate rows (where count>1). + +TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. ### Universe Field in Variablemetadata Table Changed -Universe field in variablemetadata table was changed from varchar(255) to text. The change was made to support longer strings in "universe" metadata field, similar to the rest of text fields in variablemetadata table. +Universe field in variablemetadata table was changed from **varchar(255)** to **text**. The change was made to support longer strings in "universe" metadata field, similar to the rest of text fields in variablemetadata table. ### Postgres Versions @@ -187,80 +200,67 @@ This release adds install script support for the new permissions model in Postgr Postgres 13 remains the version used with automated testing. +## 🌐 API + ### Listing Collection/Dataverse API Listing collection/dataverse role assignments via API still requires ManageDataversePermissions, but listing dataset role assignments via API now requires only ManageDatasetPermissions. -### Missing Database Constraints +### New API Endpoint for Clearing an Individual Dataset From Solr -This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. 
This can be checked by running +A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. -`SELECT uri, count(*) FROM externalvocabularyvaluet group by uri;` +### New Accounts Metrics API -and +Users can retrieve new types of metrics related to user accounts. The new capabilities are [described](https://guides.dataverse.org/en/6.2/api/metrics.html) in the guides. -`SELECT spec, count(*) FROM oaiset group by spec;` +### New canDownloadAtLeastOneFile Endpoint -and then removing any duplicate rows (where count>1). +The `/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile` endpoint has been created. -TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. +This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. -### Harvesting Client API +### Harvesting Client Endpoint Extended The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been extended to include the following fields: -- `allowHarvestingMissingCVV`: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. -Note: This setting is only available to the API and not currently accessible/settable via the UI - - - -### New Accounts Metrics API - -Users can retrieve new types of metrics related to user accounts. The new capabilities are [described](https://guides.dataverse.org/en/6.2/api/metrics.html) in the guides. - -### New canDownloadAtLeastOneFile API +- **allowHarvestingMissingCVV**: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. -The GET canDownloadAtLeastOneFile (/api/datasets/{id}/versions/{versionId}/canDownloadAtLeastOneFile) endpoint has been created. - -This API endpoint indicates if the calling user can download at least one file from a dataset version. Note that Shibboleth group permissions are not considered. +*Note: This setting is only available to the API and not currently accessible/settable via the UI* -### Extended getVersionFiles API +### Version Files Endpoint Extended -The response for getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) endpoint has been modified to include a total count of records available (totalCount:x). +The response for getVersionFiles `/api/datasets/{id}/versions/{versionId}/files` endpoint has been modified to include a total count of records available **totalCount:x**. This will aid in pagination by allowing the caller to know how many pages can be iterated through. The existing API (getVersionFileCounts) to return the count will still be available. 
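
A quick sketch of the extended endpoint (dataset id, token, and server are placeholders); the JSON envelope now also reports the total record count alongside the file list:

```
export SERVER_URL=https://demo.dataverse.org
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

# list files of the latest version of dataset 24; the response body now includes totalCount
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/24/versions/:latest/files"
```
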
-### Extended Metadata Blocks API +### Metadata Blocks Endpoint Extended The API endpoint `/api/metadatablocks/{block_id}` has been extended to include the following fields: -- `isRequired`: Whether or not this field is required -- `displayOrder`: The display order of the field in create/edit forms -- `typeClass`: The type class of this field ("controlledVocabulary", "compound", or "primitive") - -### Evaluation Version Tutorial on the Containers Guide - -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container +- **isRequired**: Whether or not this field is required +- **displayOrder**: The display order of the field in create/edit forms +- **typeClass**: The type class of this field ("controlledVocabulary", "compound", or "primitive") ### Get File Citation As JSON It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. -This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in https://github.com/IQSS/dataverse/issues/3140 and https://github.com/IQSS/dataverse/issues/9994 +This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in [#3140](https://github.com/IQSS/dataverse/issues/3140) and [#9994](https://github.com/IQSS/dataverse/issues/9994) -### Extended Files API +### Files Endpoint Extended The API endpoint `api/files/{id}` has been extended to support the following optional query parameters: -- `includeDeaccessioned`: Indicates whether or not to consider deaccessioned dataset versions in the latest file search. (Default: `false`). -- `returnDatasetVersion`: Indicates whether or not to include the dataset version of the file in the response. (Default: `false`). +- **includeDeaccessioned**: Indicates whether or not to consider deaccessioned dataset versions in the latest file search. (Default: `false`). +- **returnDatasetVersion**: Indicates whether or not to include the dataset version of the file in the response. (Default: `false`). -A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use ``:latest-published``, or ``:latest``, or ``:draft`` or ``1.0`` or any other available version identifier. +A new endpoint `api/files/{id}/versions/{datasetVersionId}` has been created. This endpoint returns the file metadata present in the requested dataset version. To specify the dataset version, you can use `:latest-published`, `:latest`, `:draft` or `1.0` or any other available version identifier. -The endpoint supports the `includeDeaccessioned` and `returnDatasetVersion` optional query parameters, as does the `api/files/{id}` endpoint. +The endpoint supports the *includeDeaccessioned* and *returnDatasetVersion* optional query parameters, as does the `api/files/{id}` endpoint. `api/files/{id}/draft` endpoint is no longer available in favor of the new endpoint `api/files/{id}/versions/{datasetVersionId}`, which can use the version identifier ``:draft`` (`api/files/{id}/versions/:draft`) to obtain the same result. 
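
For illustration, a call combining the new optional parameters with the new per-version endpoint might look like this (file id, token, and server are placeholders):

```
export SERVER_URL=https://demo.dataverse.org
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

# file metadata as of the latest published version, considering deaccessioned versions
# and including the dataset version in the response
curl -H "X-Dataverse-key:$API_TOKEN" \
  "$SERVER_URL/api/files/42/versions/:latest-published?includeDeaccessioned=true&returnDatasetVersion=true"
```
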
-### Endpoint Extended: Datasets, Dataverse Collections, and Datafiles +### Datasets, Dataverse Collections, and Datafiles Endpoints Extended The API endpoints for getting datasets, Dataverse collections, and datafiles have been extended to support the following optional 'returnOwners' query parameter. @@ -270,20 +270,20 @@ Including the parameter and setting it to true will add a hierarchy showing whic The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. -### Uningest/Reingest Options Available in the File Page Edit Menu +## 📖 Guides -New Uningest/Reingest options are available in the File Page Edit menu, allowing ingest errors to be cleared (by users who can published the associated dataset) -and (by superusers) for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). -The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. +### New QA Guide -### Publication Status Facet Restored +A new QA Guide is intended mostly for the core development team but may be of interest to contributors. -In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. +### Container Guide, Documentation for Faster Redeploy -### Permissions Required To Assign a Role Have Been Fixed +In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. -The permissions required to assign a role have been fixed. It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. +Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. -### New QA Guide +Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running `pip install -r requirements.txt`. -A new QA Guide is intended mostly for the core development team but may be of interest to contributors. +### Evaluation Version Tutorial on the Containers Guide + +The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container \ No newline at end of file From ccc839d7ba492cf511c217765c0c5f60e1ea2f1e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 14:58:48 -0400 Subject: [PATCH 0973/1112] Spacing --- doc/release-notes/6.2-release-notes.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index f3f892ce4e2..7eb162c20bc 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,6 +5,7 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. 
+ ## 💡Release highlights ### Return to Author Now Requires a Reason @@ -67,8 +68,6 @@ If neither setting exists rate limiting is disabled. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." - - ``` curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{ "tier": 0, @@ -150,6 +149,7 @@ New Uningest/Reingest options are available in the File Page Edit menu. Ingest e The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. + ## 🪲 Bugs ### Publication Status Facet Restored @@ -173,6 +173,7 @@ OAI-PMH error handling has been improved to display a machine-readable error in > - /oai?foo=bar will show "No argument 'verb' found" > - /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" + ## 💾 Persistence ### Missing Database Constraints @@ -200,6 +201,7 @@ This release adds install script support for the new permissions model in Postgr Postgres 13 remains the version used with automated testing. + ## 🌐 API ### Listing Collection/Dataverse API @@ -270,6 +272,7 @@ Including the parameter and setting it to true will add a hierarchy showing whic The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. + ## 📖 Guides ### New QA Guide From bcddcf6a9f4ae4b4bd9048aa702019b5110264d5 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 14:59:48 -0400 Subject: [PATCH 0974/1112] Bugs --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 7eb162c20bc..70b64c15550 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -150,7 +150,7 @@ New Uningest/Reingest options are available in the File Page Edit menu. Ingest e The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. -## 🪲 Bugs +## 🪲 Bug fixes ### Publication Status Facet Restored From 09d746d9cf2d9fa9558016a8f9194c8fdfa7d4bd Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 26 Mar 2024 16:20:43 -0400 Subject: [PATCH 0975/1112] 7424 Updated --- doc/release-notes/6.2-release-notes.md | 25 +++++++++++++++++++++++++ doc/release-notes/7424-mailsession.md | 24 ------------------------ 2 files changed, 25 insertions(+), 24 deletions(-) delete mode 100644 doc/release-notes/7424-mailsession.md diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 70b64c15550..519f342b2d0 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -111,6 +111,31 @@ curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndActi Hazelcast is configured in Payara and should not need any changes for this feature +## Simplified SMTP configuration + +With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure Dataverse to send mail using your SMTP server and provide a simplified, standard alternative using JVM options or MicroProfile Config. + +At this point, no action is required if you want to keep your current configuration. 
+Warnings will show in your server logs to inform and remind you about the deprecation. +A future major release of Dataverse may remove this way of configuration. + +Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [SMTP/Email Configuration](https://guides.dataverse.org/en/6.2/installation/config.html#smtp-email-configuration) section of the Installation Guide. + +Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. + +Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. + +List of options added: +> - dataverse.mail.system-email +> - dataverse.mail.mta.host +> - dataverse.mail.mta.port +> - dataverse.mail.mta.ssl.enable +> - dataverse.mail.mta.auth +> - dataverse.mail.mta.user +> - dataverse.mail.mta.password +> - dataverse.mail.mta.allow-utf8-addresses +> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). + ### Binder Redirect If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect diff --git a/doc/release-notes/7424-mailsession.md b/doc/release-notes/7424-mailsession.md deleted file mode 100644 index 67c876f7ad5..00000000000 --- a/doc/release-notes/7424-mailsession.md +++ /dev/null @@ -1,24 +0,0 @@ -## Simplified SMTP configuration - -With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure Dataverse to send mail using your SMTP server and provide a simplified, standard alternative using JVM options or MicroProfile Config. - -At this point, no action is required if you want to keep your current configuration. -Warnings will show in your server logs to inform and remind you about the deprecation. -A future major release of Dataverse may remove this way of configuration. - -Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [SMTP/Email Configuration](https://guides.dataverse.org/en/6.2/installation/config.html#smtp-email-configuration) section of the Installation Guide. - -Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. 
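
As a rough sketch of the migration (host and address are invented, and this is not the only way to supply MicroProfile settings), the new options can be set as JVM system properties with Payara's `asadmin`, or as environment variables such as `DATAVERSE_MAIL_SYSTEM_EMAIL`:

```
# illustrative values only
./asadmin create-jvm-options "-Ddataverse.mail.system-email=noreply@dataverse.example.edu"
./asadmin create-jvm-options "-Ddataverse.mail.mta.host=smtp.example.edu"
```
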
- -Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. - -List of options added: -- dataverse.mail.system-email -- dataverse.mail.mta.host -- dataverse.mail.mta.port -- dataverse.mail.mta.ssl.enable -- dataverse.mail.mta.auth -- dataverse.mail.mta.user -- dataverse.mail.mta.password -- dataverse.mail.mta.allow-utf8-addresses -- Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). \ No newline at end of file From 797bc38d4e43807dd16d62af27bca97fa8b137b5 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 27 Mar 2024 09:54:43 +0100 Subject: [PATCH 0976/1112] moved indexed time by 3 hours to prevent false negatives in isIndexedVersion test --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 2e4cb56db48..98069b31c54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -790,11 +790,15 @@ public boolean isIndexedVersion() { return isIndexedVersion = false; } // If this is the latest published version, we want to confirm that this - // version was successfully indexed after the last publication - + // version was successfully indexed after the last publication + // We add 3 hours to the indexed time to prevent false negatives + // when indexed time gets overwritten in finalizing the publication step + // by a value before the release time + final long duration = 3 * 60 * 60 * 1000; + final Timestamp movedIndexTime = new Timestamp(workingVersion.getDataset().getIndexTime().getTime() + duration); if (isThisLatestReleasedVersion()) { return isIndexedVersion = (workingVersion.getDataset().getIndexTime() != null) - && workingVersion.getDataset().getIndexTime().after(workingVersion.getReleaseTime()); + && movedIndexTime.after(workingVersion.getReleaseTime()); } // Drafts don't have the indextime stamps set/incremented when indexed, From 3cc552dd4767d73120bbe4e37c66f3beae4691e1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 10:04:01 +0000 Subject: [PATCH 0977/1112] Added: extended MetadataBlocks API for obtaining detailed metadata blocks and only displayed on create --- .../dataverse/MetadataBlockServiceBean.java | 50 +++++++++++++------ .../iq/dataverse/api/MetadataBlocks.java | 27 +++++----- .../iq/dataverse/util/json/JsonPrinter.java | 36 +++++++++---- 3 files changed, 73 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index bb6daa264ba..40b52129897 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -1,43 +1,63 @@ package edu.harvard.iq.dataverse; -import java.util.List; import jakarta.ejb.Stateless; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.NoResultException; import 
jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.*; + +import java.util.List; /** - * * @author michael */ @Stateless @Named public class MetadataBlockServiceBean { - + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - + public MetadataBlock save(MetadataBlock mdb) { - return em.merge(mdb); - } - - + return em.merge(mdb); + } + public List listMetadataBlocks() { + return listMetadataBlocks(false); + } + + public List listMetadataBlocks(boolean onlyDisplayedOnCreate) { + if (onlyDisplayedOnCreate) { + return listMetadataBlocksDisplayedOnCreate(); + } return em.createNamedQuery("MetadataBlock.listAll", MetadataBlock.class).getResultList(); } - - public MetadataBlock findById( Long id ) { + + public MetadataBlock findById(Long id) { return em.find(MetadataBlock.class, id); } - - public MetadataBlock findByName( String name ) { + + public MetadataBlock findByName(String name) { try { return em.createNamedQuery("MetadataBlock.findByName", MetadataBlock.class) - .setParameter("name", name) - .getSingleResult(); - } catch ( NoResultException nre ) { + .setParameter("name", name) + .getSingleResult(); + } catch (NoResultException nre) { return null; } } + + private List listMetadataBlocksDisplayedOnCreate() { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(MetadataBlock.class); + Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class); + Join datasetFieldTypeJoin = metadataBlockRoot.join("datasetFieldTypes"); + Predicate displayOnCreatePredicate = criteriaBuilder.isTrue(datasetFieldTypeJoin.get("displayOnCreate")); + criteriaQuery.where(displayOnCreatePredicate); + criteriaQuery.select(metadataBlockRoot).distinct(true); + TypedQuery typedQuery = em.createQuery(criteriaQuery); + return typedQuery.getResultList(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index 448fb48e389..2d643bb9798 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -1,34 +1,33 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.MetadataBlock; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.Produces; +import jakarta.ws.rs.*; import jakarta.ws.rs.core.Response; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import jakarta.ws.rs.PathParam; + +import java.util.List; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; /** * Api bean for managing metadata blocks. 
+ * * @author michael */ @Path("metadatablocks") @Produces("application/json") public class MetadataBlocks extends AbstractApiBean { - + @GET - public Response list() { - return ok(metadataBlockSvc.listMetadataBlocks().stream().map(brief::json).collect(toJsonArray())); + public Response list(@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, + @QueryParam("returnDetailedData") boolean returnDetailedData) { + List metadataBlocks = metadataBlockSvc.listMetadataBlocks(onlyDisplayedOnCreate); + return ok(json(metadataBlocks, returnDetailedData, onlyDisplayedOnCreate)); } - + @Path("{identifier}") @GET - public Response getBlock( @PathParam("identifier") String idtf ) { + public Response getBlock(@PathParam("identifier") String idtf) { MetadataBlock b = findMetadataBlock(idtf); - - return (b != null ) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); + return (b != null) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c38c1610db6..5cecc38d1f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -580,6 +580,14 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie return blockBld; } + public static JsonArrayBuilder json(List metadataBlocks, boolean returnDetailedData, boolean onlyDisplayedOnCreate) { + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for (MetadataBlock metadataBlock : metadataBlocks) { + arrayBuilder.add(returnDetailedData ? json(metadataBlock, onlyDisplayedOnCreate) : brief.json(metadataBlock)); + } + return arrayBuilder; + } + public static String typeClassString(DatasetFieldType typ) { if (typ.isControlledVocabulary()) { return "controlledVocabulary"; @@ -602,21 +610,27 @@ public static JsonObject json(DatasetField dfv) { } } - public static JsonObjectBuilder json(MetadataBlock blk) { - JsonObjectBuilder bld = jsonObjectBuilder(); - bld.add("id", blk.getId()); - bld.add("name", blk.getName()); - bld.add("displayName", blk.getDisplayName()); - bld.add("displayOnCreate", blk.isDisplayOnCreate()); + public static JsonObjectBuilder json(MetadataBlock metadataBlock) { + return json(metadataBlock, false); + } - JsonObjectBuilder fieldsBld = jsonObjectBuilder(); - for (DatasetFieldType df : new TreeSet<>(blk.getDatasetFieldTypes())) { - fieldsBld.add(df.getName(), JsonPrinter.json(df)); + public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean onlyDisplayedOnCreate) { + JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder(); + jsonObjectBuilder.add("id", metadataBlock.getId()); + jsonObjectBuilder.add("name", metadataBlock.getName()); + jsonObjectBuilder.add("displayName", metadataBlock.getDisplayName()); + jsonObjectBuilder.add("displayOnCreate", metadataBlock.isDisplayOnCreate()); + + JsonObjectBuilder fieldsBuilder = jsonObjectBuilder(); + for (DatasetFieldType datasetFieldType : new TreeSet<>(metadataBlock.getDatasetFieldTypes())) { + if (!onlyDisplayedOnCreate || datasetFieldType.isDisplayOnCreate()) { + fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType)); + } } - bld.add("fields", fieldsBld); + jsonObjectBuilder.add("fields", fieldsBuilder); - return bld; + return jsonObjectBuilder; } public static JsonObjectBuilder json(DatasetFieldType fld) { From 04add614a6b26311d9f23f256914651ec4b52ec9 Mon Sep 
17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 11:39:17 +0000 Subject: [PATCH 0978/1112] Refactor: renamed MetadataBlocks APIs and ITs --- .../harvard/iq/dataverse/api/MetadataBlocks.java | 6 +++--- .../harvard/iq/dataverse/api/MetadataBlocksIT.java | 14 +++++++++++++- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index 2d643bb9798..1732dd2ec01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -18,15 +18,15 @@ public class MetadataBlocks extends AbstractApiBean { @GET - public Response list(@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, - @QueryParam("returnDetailedData") boolean returnDetailedData) { + public Response listMetadataBlocks(@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, + @QueryParam("returnDetailedData") boolean returnDetailedData) { List metadataBlocks = metadataBlockSvc.listMetadataBlocks(onlyDisplayedOnCreate); return ok(json(metadataBlocks, returnDetailedData, onlyDisplayedOnCreate)); } @Path("{identifier}") @GET - public Response getBlock(@PathParam("identifier") String idtf) { + public Response getMetadataBlock(@PathParam("identifier") String idtf) { MetadataBlock b = findMetadataBlock(idtf); return (b != null) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index 39152bccad8..556e0a6ea74 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -20,7 +20,19 @@ public static void setUpClass() { } @Test - void testGetCitationBlock() { + void testListMetadataBlocks() { + Response getCitationBlock = UtilIT.getMetadataBlock("citation"); + getCitationBlock.prettyPrint(); + getCitationBlock.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")) + .body("data.fields.title.displayOrder", CoreMatchers.is(0)) + .body("data.fields.title.typeClass", CoreMatchers.is("primitive")) + .body("data.fields.title.isRequired", CoreMatchers.is(true)); + } + + @Test + void testGetMetadataBlock() { Response getCitationBlock = UtilIT.getMetadataBlock("citation"); getCitationBlock.prettyPrint(); getCitationBlock.then().assertThat() From 62c68f122b743a109cc113a67c25f413d8df897d Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 13:03:55 +0000 Subject: [PATCH 0979/1112] Added: ITs for listMetadataBlocks endpoint --- .../iq/dataverse/api/MetadataBlocksIT.java | 39 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 ++++ 2 files changed, 39 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index 556e0a6ea74..a293da28c95 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -8,6 +8,8 @@ import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.OK; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; @@ -21,14 +23,37 @@ public static void setUpClass() { @Test void testListMetadataBlocks() { - Response getCitationBlock = UtilIT.getMetadataBlock("citation"); - getCitationBlock.prettyPrint(); - getCitationBlock.then().assertThat() + // No optional params enabled + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, false); + int expectedDefaultNumberOfMetadataBlocks = 6; + listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")) - .body("data.fields.title.displayOrder", CoreMatchers.is(0)) - .body("data.fields.title.typeClass", CoreMatchers.is("primitive")) - .body("data.fields.title.isRequired", CoreMatchers.is(true)); + .body("data[0].fields", equalTo(null)) + .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks)); + + // onlyDisplayedOnCreate=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, false); + int expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks = 1; + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", equalTo(null)) + .body("data[0].displayName", equalTo("Citation Metadata")) + .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)); + + // returnDetailedData=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", not(equalTo(null))) + .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks)); + + // onlyDisplayedOnCreate=true and returnDetailedData=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", not(equalTo(null))) + .body("data[0].displayName", equalTo("Citation Metadata")) + .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 080ca0c43e9..432ac3353e8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -647,6 +647,13 @@ static Response setMetadataBlocks(String dataverseAlias, JsonArrayBuilder blocks .post("/api/dataverses/" + dataverseAlias + "/metadatablocks"); } + static Response listMetadataBlocks(boolean onlyDisplayedOnCreate, boolean returnDetailedData) { + return given() + .queryParam("onlyDisplayedOnCreate", onlyDisplayedOnCreate) + .queryParam("returnDetailedData", returnDetailedData) + .get("/api/metadatablocks"); + } + static Response getMetadataBlock(String block) { return given() .get("/api/metadatablocks/" + block); From 06c82a8cefc390bd0742e453bf2b82853bbeccbe Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Wed, 27 Mar 2024 09:09:10 -0400 Subject: [PATCH 0980/1112] Update doc/release-notes/6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 519f342b2d0..3fdb25d8bfb 
100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -111,7 +111,7 @@ curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndActi Hazelcast is configured in Payara and should not need any changes for this feature -## Simplified SMTP configuration +### Simplified SMTP configuration With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure Dataverse to send mail using your SMTP server and provide a simplified, standard alternative using JVM options or MicroProfile Config. From 138dc12a7be2e0d19cfa9c69290e84960d5fa047 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 13:22:42 +0000 Subject: [PATCH 0981/1112] Added: displayOnCreate field to brief json MetadataBlock --- .../edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java index 3fcaf6b11ff..c16a46a1765 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java @@ -28,6 +28,7 @@ public JsonObjectBuilder json( MetadataBlock blk ) { ? null : jsonObjectBuilder().add("id", blk.getId()) .add("displayName", blk.getDisplayName()) + .add("displayOnCreate", blk.isDisplayOnCreate()) .add("name", blk.getName()) ; } From ca043eb689f49106dbc7555679bd0a1413d36d3f Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 13:29:34 +0000 Subject: [PATCH 0982/1112] Changed: renamed optional query param in metadatablocks endpoint --- .../java/edu/harvard/iq/dataverse/api/MetadataBlocks.java | 4 ++-- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 4 ++-- .../java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java | 4 ++-- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index 1732dd2ec01..8861abd4803 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -19,9 +19,9 @@ public class MetadataBlocks extends AbstractApiBean { @GET public Response listMetadataBlocks(@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, - @QueryParam("returnDetailedData") boolean returnDetailedData) { + @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) { List metadataBlocks = metadataBlockSvc.listMetadataBlocks(onlyDisplayedOnCreate); - return ok(json(metadataBlocks, returnDetailedData, onlyDisplayedOnCreate)); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate)); } @Path("{identifier}") diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 5cecc38d1f1..c7fb78e01e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -580,10 +580,10 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie return blockBld; } - public static JsonArrayBuilder json(List metadataBlocks, boolean returnDetailedData, boolean onlyDisplayedOnCreate) { + public static JsonArrayBuilder json(List metadataBlocks, boolean 
returnDatasetFieldTypes, boolean onlyDisplayedOnCreate) { JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for (MetadataBlock metadataBlock : metadataBlocks) { - arrayBuilder.add(returnDetailedData ? json(metadataBlock, onlyDisplayedOnCreate) : brief.json(metadataBlock)); + arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, onlyDisplayedOnCreate) : brief.json(metadataBlock)); } return arrayBuilder; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index a293da28c95..5f5a7fbc0f8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -40,14 +40,14 @@ void testListMetadataBlocks() { .body("data[0].displayName", equalTo("Citation Metadata")) .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)); - // returnDetailedData=true + // returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks)); - // onlyDisplayedOnCreate=true and returnDetailedData=true + // onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 432ac3353e8..2f94bd714c0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -647,10 +647,10 @@ static Response setMetadataBlocks(String dataverseAlias, JsonArrayBuilder blocks .post("/api/dataverses/" + dataverseAlias + "/metadatablocks"); } - static Response listMetadataBlocks(boolean onlyDisplayedOnCreate, boolean returnDetailedData) { + static Response listMetadataBlocks(boolean onlyDisplayedOnCreate, boolean returnDatasetFieldTypes) { return given() .queryParam("onlyDisplayedOnCreate", onlyDisplayedOnCreate) - .queryParam("returnDetailedData", returnDetailedData) + .queryParam("returnDatasetFieldTypes", returnDatasetFieldTypes) .get("/api/metadatablocks"); } From b4dac0d902b10cde3b99d95541231fc62007d261 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 13:29:45 +0000 Subject: [PATCH 0983/1112] Added: docs for #10389 --- doc/sphinx-guides/source/api/native-api.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c34543d6aa..d57962c1ce7 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4455,6 +4455,25 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/metadatablocks" +This endpoint supports the following optional query parameters: + +- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. +- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. 
If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. + +An example using the optional query parameters is presented below: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl "$SERVER_URL/api/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + Show Info About Single Metadata Block ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 3b7e729d5092ed43dfb12f4a6829ed87d2871852 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 09:45:18 -0400 Subject: [PATCH 0984/1112] First round changes --- doc/release-notes/6.2-release-notes.md | 85 +++++++++++++++----------- 1 file changed, 49 insertions(+), 36 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 3fdb25d8bfb..5e211f96284 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,8 +5,15 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. +### Search and Facet by License -## 💡Release highlights +A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. + +Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. + +For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). + +## 💡Release Highlights ### Return to Author Now Requires a Reason @@ -14,9 +21,10 @@ The Popup for returning to author now requires a reason that will be sent by ema Please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. + ### Support for Using Multiple PID Providers -This release adds support for using multiple PID (DOI, Handle, PermalLink) providers, multiple PID provider accounts +This release adds support for using multiple PID (DOI, Handle, PermaLink) providers, multiple PID provider accounts (managing a given protocol, authority,separator, shoulder combination), assigning PID provider accounts to specific collections, and supporting transferred PIDs (where a PID is managed by an account when it's authority, separator, and/or shoulder don't match the combination where the account can mint new PIDs). It also adds the ability for additional provider services beyond the existing @@ -26,33 +34,8 @@ These changes require per-provider settings rather than the global PID settings for installations using a single PID Provider account is provided, updating to use the new microprofile settings is highly recommended and will be required in a future version. 
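As a rough illustration of the per-provider configuration style described above, a single DataCite account might be wired up with JVM options along these lines. This is only a sketch: the provider id `datacite1`, the `type` value, and the authority/shoulder values are placeholder assumptions rather than values taken from this release; see the settings list below and the Installation Guide for the authoritative names.

```
# Hypothetical single-provider setup (all values are placeholders)
./asadmin create-jvm-options "-Ddataverse.pid.providers=datacite1"
./asadmin create-jvm-options "-Ddataverse.pid.default-provider=datacite1"
./asadmin create-jvm-options "-Ddataverse.pid.datacite1.type=datacite"
./asadmin create-jvm-options "-Ddataverse.pid.datacite1.label=DataCite"
./asadmin create-jvm-options "-Ddataverse.pid.datacite1.authority=10.5072"
./asadmin create-jvm-options "-Ddataverse.pid.datacite1.shoulder=FK2/"
```

Credentials and API URLs for the account would go in the corresponding `dataverse.pid.*.datacite.*` settings from the same list.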
-New microprofile settings (where * indicates a provider id indicating which provider the setting is for): +[New microprofile settings](#new-microprofile-settings) -> - dataverse.pid.providers -> - dataverse.pid.default-provider -> - dataverse.pid.*.type -> - dataverse.pid.*.label -> - dataverse.pid.*.authority -> - dataverse.pid.*.shoulder -> - dataverse.pid.*.identifier-generation-style -> - dataverse.pid.*.datafile-pid-format -> - dataverse.pid.*.managed-list -> - dataverse.pid.*.excluded-list -> - dataverse.pid.*.datacite.mds-api-url -> - dataverse.pid.*.datacite.rest-api-url -> - dataverse.pid.*.datacite.username -> - dataverse.pid.*.datacite.password -> - dataverse.pid.*.ezid.api-url -> - dataverse.pid.*.ezid.username -> - dataverse.pid.*.ezid.password -> - dataverse.pid.*.permalink.base-url -> - dataverse.pid.*.permalink.separator -> - dataverse.pid.*.handlenet.index -> - dataverse.pid.*.handlenet.independent-service -> - dataverse.pid.*.handlenet.auth-handle -> - dataverse.pid.*.handlenet.key.path -> - dataverse.pid.*.handlenet.key.passphrase -> - dataverse.spi.pidproviders.directory ### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) @@ -125,7 +108,8 @@ Once reconfiguration is complete, you should remove legacy, unused config. First Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. -List of options added: +For a list of new settings see the section below: + > - dataverse.mail.system-email > - dataverse.mail.mta.host > - dataverse.mail.mta.port @@ -146,13 +130,7 @@ For more information, see [#10360](https://github.com/IQSS/dataverse/issues/1036 When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the **<head>** of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. -### Search by License -A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. - -Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. - -For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). 
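For readers who want to try the license filter from the command line, a fully expanded form of the `fq` example above might look like the following (using the demo server that appears elsewhere in these notes; substitute your own installation's URL):

```
# Find everything published under CC0 1.0
curl "https://demo.dataverse.org/api/search?q=*&fq=license%3A%22CC0+1.0%22"
```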
### Add .QPJ and .QMD Extensions to Shapefile Handling @@ -314,4 +292,39 @@ Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphi ### Evaluation Version Tutorial on the Containers Guide -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container \ No newline at end of file +The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container + +*** + +# New Settings + + +### New microprofile settings : [Go back](#multiple-pid-sup) + +*The * indicates a provider id indicating which provider the setting is for* + +> - dataverse.pid.providers +> - dataverse.pid.default-provider +> - dataverse.pid.*.type +> - dataverse.pid.*.label +> - dataverse.pid.*.authority +> - dataverse.pid.*.shoulder +> - dataverse.pid.*.identifier-generation-style +> - dataverse.pid.*.datafile-pid-format +> - dataverse.pid.*.managed-list +> - dataverse.pid.*.excluded-list +> - dataverse.pid.*.datacite.mds-api-url +> - dataverse.pid.*.datacite.rest-api-url +> - dataverse.pid.*.datacite.username +> - dataverse.pid.*.datacite.password +> - dataverse.pid.*.ezid.api-url +> - dataverse.pid.*.ezid.username +> - dataverse.pid.*.ezid.password +> - dataverse.pid.*.permalink.base-url +> - dataverse.pid.*.permalink.separator +> - dataverse.pid.*.handlenet.index +> - dataverse.pid.*.handlenet.independent-service +> - dataverse.pid.*.handlenet.auth-handle +> - dataverse.pid.*.handlenet.key.path +> - dataverse.pid.*.handlenet.key.passphrase +> - dataverse.spi.pidproviders.directory \ No newline at end of file From 971094e59219c66475cb6160d8a54444e47447e6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Mar 2024 14:57:38 +0000 Subject: [PATCH 0985/1112] Fixed: BriefJsonPrinterTest --- .../util/json/BriefJsonPrinterTest.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java index b426f84a464..fc458d88acd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java @@ -1,19 +1,16 @@ package edu.harvard.iq.dataverse.util.json; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldConstant; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.mocks.MocksFactory; import edu.harvard.iq.dataverse.workflow.Workflow; import jakarta.json.JsonObject; import org.junit.jupiter.api.Test; import java.util.Collections; +import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; /** * @@ -58,14 +55,16 @@ public void testJson_MetadataBlock() { mtb.setId(1L); mtb.setName("metadata_block_name"); mtb.setDisplayName("Metadata Block Name"); + mtb.setDatasetFieldTypes(List.of(new DatasetFieldType("JustAString", DatasetFieldType.FieldType.TEXT, false))); BriefJsonPrinter sut = new BriefJsonPrinter(); JsonObject res = sut.json(mtb).build(); assertEquals("Metadata Block Name", res.getString("displayName")); - 
assertEquals("metadata_block_name", res.getString("name")); - assertEquals(1, res.getInt("id")); - assertEquals(3, res.keySet().size()); + assertEquals("metadata_block_name", res.getString("name")); + assertFalse(res.getBoolean("displayOnCreate")); + assertEquals(1, res.getInt("id")); + assertEquals(4, res.keySet().size()); } /** From b196f54f9d1be40b6c4c92b5b6c2263f779bc3a0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Mar 2024 11:40:22 -0400 Subject: [PATCH 0986/1112] add spaces before default and inherited in parens #10390 --- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index f35682b7bd0..a299bcd4227 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -1307,11 +1307,11 @@ public Set> getPidProviderOptions() { String label = null; if (this.dataverse.getOwner() != null && this.dataverse.getOwner().getEffectivePidGenerator()!= null) { PidProvider inheritedPidProvider = this.dataverse.getOwner().getEffectivePidGenerator(); - label = inheritedPidProvider.getLabel() + BundleUtil.getStringFromBundle("dataverse.inherited") + ": " + label = inheritedPidProvider.getLabel() + " " + BundleUtil.getStringFromBundle("dataverse.inherited") + ": " + inheritedPidProvider.getProtocol() + ":" + inheritedPidProvider.getAuthority() + inheritedPidProvider.getSeparator() + inheritedPidProvider.getShoulder(); } else { - label = defaultPidProvider.getLabel() + BundleUtil.getStringFromBundle("dataverse.default") + ": " + label = defaultPidProvider.getLabel() + " " + BundleUtil.getStringFromBundle("dataverse.default") + ": " + defaultPidProvider.getProtocol() + ":" + defaultPidProvider.getAuthority() + defaultPidProvider.getSeparator() + defaultPidProvider.getShoulder(); } From 0769ee9de49bffa8eca042e39b1668565330ae8f Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 27 Mar 2024 17:00:35 +0100 Subject: [PATCH 0987/1112] nullpointer fix --- .../edu/harvard/iq/dataverse/DatasetPage.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 98069b31c54..6af1872b63b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -791,14 +791,16 @@ public boolean isIndexedVersion() { } // If this is the latest published version, we want to confirm that this // version was successfully indexed after the last publication - // We add 3 hours to the indexed time to prevent false negatives - // when indexed time gets overwritten in finalizing the publication step - // by a value before the release time - final long duration = 3 * 60 * 60 * 1000; - final Timestamp movedIndexTime = new Timestamp(workingVersion.getDataset().getIndexTime().getTime() + duration); if (isThisLatestReleasedVersion()) { - return isIndexedVersion = (workingVersion.getDataset().getIndexTime() != null) - && movedIndexTime.after(workingVersion.getReleaseTime()); + if (workingVersion.getDataset().getIndexTime() == null) { + return isIndexedVersion = false; + } + // We add 3 hours to the indexed time to prevent false negatives + // when indexed time gets overwritten in finalizing the publication step + // by a value before the release 
time + final long duration = 3 * 60 * 60 * 1000; + final Timestamp movedIndexTime = new Timestamp(workingVersion.getDataset().getIndexTime().getTime() + duration); + return isIndexedVersion = movedIndexTime.after(workingVersion.getReleaseTime()); } // Drafts don't have the indextime stamps set/incremented when indexed, From 1479403d9ac92145edbb806cb798f1ef52240219 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 27 Mar 2024 17:18:40 +0100 Subject: [PATCH 0988/1112] quick info on the new metrics added for indexing --- doc/sphinx-guides/source/admin/monitoring.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst index 04fba23a3e8..ef306c88c6f 100644 --- a/doc/sphinx-guides/source/admin/monitoring.rst +++ b/doc/sphinx-guides/source/admin/monitoring.rst @@ -150,3 +150,11 @@ Tips: - It's possible to view and act on **RDS Events** such as snapshots, parameter changes, etc. See `Working with Amazon RDS events `_ for details. - RDS monitoring is available via API and the ``aws`` command line tool. For example, see `Retrieving metrics with the Performance Insights API `_. - To play with monitoring RDS using a server configured by `dataverse-ansible `_ set ``use_rds`` to true to skip some steps that aren't necessary when using RDS. See also the :doc:`/developers/deployment` section of the Developer Guide. + +MicroProfile Metrics endpoint +----------------------------- + +Payara provides the metrics endpoint: _ +The metrics you can retrieve that way: +- `index_permit_wait_time_seconds_mean` displays how long does it take to receive a permit to index a dataset. +- `index_time_seconds` displays how long does it take to index a dataset. From ca5506a780389f70e9f18d37d4a081659929d156 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 12:20:34 -0400 Subject: [PATCH 0989/1112] Corrections and instructions for update --- doc/release-notes/6.1-release-notes.md | 3 +- doc/release-notes/6.2-release-notes.md | 162 +++++++++++++++++++++---- 2 files changed, 138 insertions(+), 27 deletions(-) diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md index 1279d09a023..dbeda726aad 100644 --- a/doc/release-notes/6.1-release-notes.md +++ b/doc/release-notes/6.1-release-notes.md @@ -247,7 +247,7 @@ Upgrading requires a maintenance window and downtime. Please plan ahead, create These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0. -0\. These instructions assume that you are upgrading from 6.0. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 5.14. +0\. These instructions assume that you are upgrading from 6.0. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 6.1. If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. @@ -288,6 +288,7 @@ As noted above, deployment of the war file might take several minutes due a data 6a\. 
Update Citation Metadata Block (to make Alternative Title repeatable) +- `wget https://github.com/IQSS/dataverse/releases/download/v6.1/citation.tsv` - `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` 7\. Upate Solr schema.xml to allow multiple Alternative Titles to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 5e211f96284..529b8e59075 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -17,16 +17,16 @@ For more information, see [#10204](https://github.com/IQSS/dataverse/issues/1020 ### Return to Author Now Requires a Reason -The Popup for returning to author now requires a reason that will be sent by email to the author. +The Popup for returning to author now allows to type in a message to explain the reasons of return and potential edits needed, that will be sent by email to the author. -Please note that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. +Please note that this note is mandatory, but that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. ### Support for Using Multiple PID Providers This release adds support for using multiple PID (DOI, Handle, PermaLink) providers, multiple PID provider accounts (managing a given protocol, authority,separator, shoulder combination), assigning PID provider accounts to specific collections, -and supporting transferred PIDs (where a PID is managed by an account when it's authority, separator, and/or shoulder don't match +and supporting transferred PIDs (where a PID is managed by an account when its authority, separator, and/or shoulder don't match the combination where the account can mint new PIDs). It also adds the ability for additional provider services beyond the existing DataCite, EZId, Handle, and PermaLink providers to be dynamically added as separate jar files. @@ -108,17 +108,9 @@ Once reconfiguration is complete, you should remove legacy, unused config. First Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. -For a list of new settings see the section below: +[New SMTP settings](#smtp-settings): + -> - dataverse.mail.system-email -> - dataverse.mail.mta.host -> - dataverse.mail.mta.port -> - dataverse.mail.mta.ssl.enable -> - dataverse.mail.mta.auth -> - dataverse.mail.mta.user -> - dataverse.mail.mta.password -> - dataverse.mail.mta.allow-utf8-addresses -> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). 
### Binder Redirect @@ -144,7 +136,7 @@ Access API will be able to take advantage of Direct Download for tab. files save This behavior is controlled by the new setting **:StoreIngestedTabularFilesWithVarHeaders**. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. -An API for converting existing legacy tabular files will be added separately. [this line will need to be changed if we have time to add said API before 6.2 is released]. [TODO] +We are planning to add an API for converting existing legacy tabular files in a future release. ### Uningest/Reingest Options Available in the File Page Edit Menu @@ -167,8 +159,6 @@ The permissions required to assign a role have been fixed. It is no longer possi The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. -TODO: Whoever puts the release notes together should make sure there is the standard note about updating the schema after upgrading. - ### OAI-PMH Error Handling Has Been Improved OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. @@ -176,6 +166,9 @@ OAI-PMH error handling has been improved to display a machine-readable error in > - /oai?foo=bar will show "No argument 'verb' found" > - /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" +### Granting File Access Without Access Request + +A bug introduced with the guestboook-at-request, requests are not deleted when granted, they are now given the state granted. ## 💾 Persistence @@ -186,13 +179,12 @@ This release adds two missing database constraints that will assure that the ext ``` SELECT uri, count(*) FROM externalvocabularyvaluet group by uri; ``` -and +And: ``` SELECT spec, count(*) FROM oaiset group by spec; ``` -and then removing any duplicate rows (where count>1). +Then removing any duplicate rows (where count>1). -TODO: Whoever puts the release notes together should make sure there is the standard note about reloading metadata blocks for the citation, astrophysics, and biomedical blocks (plus any others from other PRs) after upgrading. ### Universe Field in Variablemetadata Table Changed @@ -278,10 +270,6 @@ The API endpoint `api/datasets/{id}/metadata` has been changed to default to the ## 📖 Guides -### New QA Guide - -A new QA Guide is intended mostly for the core development team but may be of interest to contributors. - ### Container Guide, Documentation for Faster Redeploy In the Container Guide, documentation for developers on how to quickly redeploy code has been added for Netbeans and improved for IntelliJ. 
@@ -294,12 +282,16 @@ Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphi The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container +### New QA Guide + +A new QA Guide is intended mostly for the core development team but may be of interest to contributors. + *** -# New Settings +# ⚙️ New Settings -### New microprofile settings : [Go back](#multiple-pid-sup) +### Microprofile settings : *The * indicates a provider id indicating which provider the setting is for* @@ -327,4 +319,122 @@ The Container Guide now containers a tutorial for running Dataverse in container > - dataverse.pid.*.handlenet.auth-handle > - dataverse.pid.*.handlenet.key.path > - dataverse.pid.*.handlenet.key.passphrase -> - dataverse.spi.pidproviders.directory \ No newline at end of file +> - dataverse.spi.pidproviders.directory + +[⬅️ Go back](#multiple-pid-sup) + +## SMTP Settings: + +> - dataverse.mail.system-email +> - dataverse.mail.mta.host +> - dataverse.mail.mta.port +> - dataverse.mail.mta.ssl.enable +> - dataverse.mail.mta.auth +> - dataverse.mail.mta.user +> - dataverse.mail.mta.password +> - dataverse.mail.mta.allow-utf8-addresses +> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). + +[⬅️ Go back](#simplified-smtp-configuration) + +## Upgrade instructions +Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.1. + +0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara6` + +(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin undeploy dataverse-6.0` + +2\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +3\. Start Payara + +- `service payara start` + +4\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-6.1.war` + +As noted above, deployment of the war file might take several minutes due a database migration script required for the new storage quotas feature. + +5\. Restart Payara + +- `service payara stop` +- `service payara start` + +6\. Update Geospatial Metadata Block. + + ``` + wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv + + curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv + ``` + +6a\. Update Citation Metadata Block. 
+ +``` +wget https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv +``` + +6b\. Update Astrophysics Metadata Block. + +``` +wget https://github.com/IQSS/dataverse/releases/download/v6.2/astrophysics.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/astrophysics.tsv +``` + +6c\. Update Biomedical Metadata Block (to make Alternative Title repeatable) + +``` +wget https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/biomedical.tsv +``` + +7\. Upate Solr schema.xml. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). + +7a\. For installations without custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- Replace schema.xml + + - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + +- Start Solr instance (usually `service solr start`, depending on Solr/OS) + +7b\. For installations with custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): +``` + wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml +``` +OR, alternatively, you can edit the following line in your schema.xml by hand as follows (to indicate that alternative title is now `multiValued="true"`): +``` + +``` + +- Restart Solr instance (usually `service solr restart` depending on solr/OS) + +8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). 
\ No newline at end of file From db46350a0278a66f990272058c0c0bebb96cf1c2 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 27 Mar 2024 12:46:21 -0400 Subject: [PATCH 0990/1112] remove sql table create in favor of automatic table creation by JPA --- .../makedatacount/MakeDataCountProcessState.java | 1 + src/main/resources/db/migration/V6.1.0.8.sql | 10 ---------- 2 files changed, 1 insertion(+), 10 deletions(-) delete mode 100644 src/main/resources/db/migration/V6.1.0.8.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java index bde705abf44..9b6ce457de9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java @@ -8,6 +8,7 @@ import java.util.Arrays; @Entity +@Table(indexes = {@Index(columnList="yearMonth")}) public class MakeDataCountProcessState implements Serializable { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) diff --git a/src/main/resources/db/migration/V6.1.0.8.sql b/src/main/resources/db/migration/V6.1.0.8.sql deleted file mode 100644 index b8f466c0b73..00000000000 --- a/src/main/resources/db/migration/V6.1.0.8.sql +++ /dev/null @@ -1,10 +0,0 @@ -CREATE TABLE IF NOT EXISTS makedatacountprocessstate ( - id SERIAL NOT NULL, - yearMonth VARCHAR(16) NOT NULL UNIQUE, - state ENUM('new', 'done', 'skip', 'processing', 'failed') NOT NULL, - state_change_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (ID) - ); - -CREATE INDEX IF NOT EXISTS INDEX_makedatacountprocessstate_yearMonth ON makedatacountprocessstate (yearMonth); - From bda01859486c9a6887f2bf054375aefc4f54e2f9 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 13:16:21 -0400 Subject: [PATCH 0991/1112] Update with initial feedback --- doc/release-notes/6.2-release-notes.md | 126 +++++++++++++------------ 1 file changed, 66 insertions(+), 60 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 529b8e59075..8e5bf32b0e1 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -34,7 +34,7 @@ These changes require per-provider settings rather than the global PID settings for installations using a single PID Provider account is provided, updating to use the new microprofile settings is highly recommended and will be required in a future version. -[New microprofile settings](#new-microprofile-settings) +[New microprofile settings](#microprofile-settings) ### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) @@ -110,8 +110,6 @@ Please note: as there have been problems with email delivered to SPAM folders wh [New SMTP settings](#smtp-settings): - - ### Binder Redirect If your installation is configured to use Binder, you should remove the old "girder_ythub" tool and replace it with the tool described at https://github.com/IQSS/dataverse-binder-redirect @@ -122,11 +120,13 @@ For more information, see [#10360](https://github.com/IQSS/dataverse/issues/1036 When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the **<head>** of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. 
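As a sketch of what "available for download ... from the API" can look like in practice, the standard export endpoint can be used. The exporter name `croissant` and the DOI below are illustrative assumptions and depend on which exporter is actually installed:

```
# JSON-LD (schema.org) export
curl "https://demo.dataverse.org/api/datasets/export?exporter=schema.org&persistentId=doi:10.5072/FK2/EXAMPLE"

# Croissant export (only if the Croissant exporter is installed)
curl "https://demo.dataverse.org/api/datasets/export?exporter=croissant&persistentId=doi:10.5072/FK2/EXAMPLE"
```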
+### Harvesting Handle Missing Controlled Values +Allows datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. For mor information view the changes to the endpoint [here](#harvesting-client-endpoint-extended). ### Add .QPJ and .QMD Extensions to Shapefile Handling -- Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. +Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. ### Ingested Tabular Data Files Can Be Stored Without the Variable Name Header @@ -144,14 +144,19 @@ New Uningest/Reingest options are available in the File Page Edit menu. Ingest e The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. +### Sphinx Guides now Support Markdown Format and Tabs +Our guides now support the Markdown format with the extension **.md**. Additionally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running: +``` +pip install -r requirements.txt. +``` ## 🪲 Bug fixes ### Publication Status Facet Restored In version 6.1, the publication status facet location was unintentionally moved to the bottom. In this version, we have restored the original order. -### Permissions Required To Assign a Role Have Been Fixed +### Assign a Role With Higher Permissions Than Its Own Role Has Been Fixed The permissions required to assign a role have been fixed. It is no longer possible to assign a role that includes permissions that the assigning user doesn't have. @@ -267,7 +272,6 @@ Including the parameter and setting it to true will add a hierarchy showing whic The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. - ## 📖 Guides ### Container Guide, Documentation for Faster Redeploy @@ -276,68 +280,20 @@ In the Container Guide, documentation for developers on how to quickly redeploy Also in the context of containers, a new option to skip deployment has been added and the war file is now consistently named "dataverse.war" rather than having a version in the filename, such as "dataverse-6.1.war". This predictability makes tooling easier. -Finally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running `pip install -r requirements.txt`. - ### Evaluation Version Tutorial on the Containers Guide -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container +The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/container ### New QA Guide -A new QA Guide is intended mostly for the core development team but may be of interest to contributors. 
+A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/develop/qa -*** +## ⚠️ Breaking Changes https://guides.dataverse.org/en/en/develop/qa/index.html -# ⚙️ New Settings +To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/develop/api/changelog.html). - -### Microprofile settings : - -*The * indicates a provider id indicating which provider the setting is for* - -> - dataverse.pid.providers -> - dataverse.pid.default-provider -> - dataverse.pid.*.type -> - dataverse.pid.*.label -> - dataverse.pid.*.authority -> - dataverse.pid.*.shoulder -> - dataverse.pid.*.identifier-generation-style -> - dataverse.pid.*.datafile-pid-format -> - dataverse.pid.*.managed-list -> - dataverse.pid.*.excluded-list -> - dataverse.pid.*.datacite.mds-api-url -> - dataverse.pid.*.datacite.rest-api-url -> - dataverse.pid.*.datacite.username -> - dataverse.pid.*.datacite.password -> - dataverse.pid.*.ezid.api-url -> - dataverse.pid.*.ezid.username -> - dataverse.pid.*.ezid.password -> - dataverse.pid.*.permalink.base-url -> - dataverse.pid.*.permalink.separator -> - dataverse.pid.*.handlenet.index -> - dataverse.pid.*.handlenet.independent-service -> - dataverse.pid.*.handlenet.auth-handle -> - dataverse.pid.*.handlenet.key.path -> - dataverse.pid.*.handlenet.key.passphrase -> - dataverse.spi.pidproviders.directory - -[⬅️ Go back](#multiple-pid-sup) - -## SMTP Settings: - -> - dataverse.mail.system-email -> - dataverse.mail.mta.host -> - dataverse.mail.mta.port -> - dataverse.mail.mta.ssl.enable -> - dataverse.mail.mta.auth -> - dataverse.mail.mta.user -> - dataverse.mail.mta.password -> - dataverse.mail.mta.allow-utf8-addresses -> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). -[⬅️ Go back](#simplified-smtp-configuration) - -## Upgrade instructions +## 💻 Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.1. @@ -437,4 +393,54 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as - Restart Solr instance (usually `service solr restart` depending on solr/OS) -8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). \ No newline at end of file +8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). 
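For step 8, the batch re-export described in the linked Admin Guide is typically a single admin API call, roughly as below; treat this as a sketch and follow the guide for the exact, supported invocation on your installation:

```
# Re-export metadata for all published datasets in all formats
curl http://localhost:8080/api/admin/metadata/reExportAll
```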
+ +*** + +## ⚙️ New Settings + +### Microprofile settings + +*The * indicates a provider id indicating which provider the setting is for* + +> - dataverse.pid.providers +> - dataverse.pid.default-provider +> - dataverse.pid.*.type +> - dataverse.pid.*.label +> - dataverse.pid.*.authority +> - dataverse.pid.*.shoulder +> - dataverse.pid.*.identifier-generation-style +> - dataverse.pid.*.datafile-pid-format +> - dataverse.pid.*.managed-list +> - dataverse.pid.*.excluded-list +> - dataverse.pid.*.datacite.mds-api-url +> - dataverse.pid.*.datacite.rest-api-url +> - dataverse.pid.*.datacite.username +> - dataverse.pid.*.datacite.password +> - dataverse.pid.*.ezid.api-url +> - dataverse.pid.*.ezid.username +> - dataverse.pid.*.ezid.password +> - dataverse.pid.*.permalink.base-url +> - dataverse.pid.*.permalink.separator +> - dataverse.pid.*.handlenet.index +> - dataverse.pid.*.handlenet.independent-service +> - dataverse.pid.*.handlenet.auth-handle +> - dataverse.pid.*.handlenet.key.path +> - dataverse.pid.*.handlenet.key.passphrase +> - dataverse.spi.pidproviders.directory + +[⬅️ Go back](#multiple-pid-sup) + +## SMTP Settings: + +> - dataverse.mail.system-email +> - dataverse.mail.mta.host +> - dataverse.mail.mta.port +> - dataverse.mail.mta.ssl.enable +> - dataverse.mail.mta.auth +> - dataverse.mail.mta.user +> - dataverse.mail.mta.password +> - dataverse.mail.mta.allow-utf8-addresses +> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). + +[⬅️ Go back](#simplified-smtp-configuration) \ No newline at end of file From cf9b1bb3be9ecf080ff20b4dbb52c33790926a20 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 27 Mar 2024 13:40:55 -0400 Subject: [PATCH 0992/1112] Added a one sentence release note, plus another comment to the Dataset class where the redirect url is made. #10254 --- doc/release-notes/10254-fix-harvested-redirects.md | 1 + src/main/java/edu/harvard/iq/dataverse/Dataset.java | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 doc/release-notes/10254-fix-harvested-redirects.md diff --git a/doc/release-notes/10254-fix-harvested-redirects.md b/doc/release-notes/10254-fix-harvested-redirects.md new file mode 100644 index 00000000000..02ee5ddaf4d --- /dev/null +++ b/doc/release-notes/10254-fix-harvested-redirects.md @@ -0,0 +1 @@ +Redirects from search cards back to the original source for datasets harvested from "Generic OAI Archives", i.e. non-Dataverse OAI servers, have been fixed. diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index bb406c9f2fa..eaf406d01bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -859,6 +859,14 @@ public String getRemoteArchiveURL() { String harvestingUrl = this.getHarvestedFrom().getHarvestingUrl(); String archivalUrl = this.getHarvestedFrom().getArchiveUrl(); if (!harvestingUrl.contains(archivalUrl)) { + // When a Harvesting Client is created, the “archive url” is set to + // just the host part of the OAI url automatically. + // For example, if the OAI url was "https://remote.edu/oai", + // the archive url will default to "https://remote.edu/". 
+ // If this is no longer true, we know it means the admin + // went to the trouble of setting it to something else - + // so we should use this url for the redirects back to source, + // instead of the global id resolver. return archivalUrl + this.getAuthority() + "/" + this.getIdentifier(); } // ... if not, we'll redirect to the resolver for the global id: From 3d0b7f8c5cd4a92b258f6c56e933ffc4a6c5eccc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Mar 2024 14:11:39 -0400 Subject: [PATCH 0993/1112] bump version to 6.2 #10423 --- doc/sphinx-guides/source/conf.py | 4 ++-- doc/sphinx-guides/source/versions.rst | 3 ++- modules/dataverse-parent/pom.xml | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 98d10526517..5a4b124cf2e 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -67,9 +67,9 @@ # built documents. # # The short X.Y version. -version = '6.1' +version = '6.2' # The full version, including alpha/beta/rc tags. -release = '6.1' +release = '6.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index 2cf7f46dc5e..d76f9a889cb 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) 
` -- 6.1 +- 6.2 +- `6.1 `__ - `6.0 `__ - `5.14 `__ - `5.13 `__ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 1a538905a8d..612902b47a4 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 6.1 + 6.2 17 UTF-8 From b974f14e45fc2dfc3e2db0dbc2fd8724775ef0ab Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 27 Mar 2024 14:32:26 -0400 Subject: [PATCH 0994/1112] review comments --- .../java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java | 4 ++-- .../dataverse/makedatacount/MakeDataCountProcessState.java | 6 +++--- .../edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index d94ab42c516..1f2f1039327 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -219,7 +219,7 @@ public Response getProcessingState(@PathParam("yearMonth") String yearMonth) { JsonObjectBuilder output = Json.createObjectBuilder(); output.add("yearMonth", mdcps.getYearMonth()); output.add("state", mdcps.getState().name()); - output.add("state-change-timestamp", mdcps.getStateChangeTime().toString()); + output.add("stateChangeTimestamp", mdcps.getStateChangeTime().toString()); return ok(output); } else { return error(Status.NOT_FOUND, "Could not find an existing process state for " + yearMonth); @@ -239,7 +239,7 @@ public Response updateProcessingState(@PathParam("yearMonth") String yearMonth, JsonObjectBuilder output = Json.createObjectBuilder(); output.add("yearMonth", mdcps.getYearMonth()); output.add("state", mdcps.getState().name()); - output.add("state-change-timestamp", mdcps.getStateChangeTime().toString()); + output.add("stateChangeTimestamp", mdcps.getStateChangeTime().toString()); return ok(output); } diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java index 9b6ce457de9..2241a2c4ca8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountProcessState.java @@ -41,7 +41,7 @@ public String toString() { @Column(nullable = false) private MDCProcessState state; @Column(nullable = true) - private Timestamp state_change_time; + private Timestamp stateChangeTimestamp; public MakeDataCountProcessState() { } public MakeDataCountProcessState (String yearMonth, String state) { @@ -61,7 +61,7 @@ public String getYearMonth() { } public void setState(MDCProcessState state) { this.state = state; - this.state_change_time = Timestamp.from(Instant.now()); + this.stateChangeTimestamp = Timestamp.from(Instant.now()); } public void setState(String state) throws IllegalArgumentException { setState(MDCProcessState.fromString(state)); @@ -70,6 +70,6 @@ public MDCProcessState getState() { return this.state; } public Timestamp getStateChangeTime() { - return state_change_time; + return stateChangeTimestamp; } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java index 64856461703..69bdd8ee515 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java @@ -195,14 +195,14 @@ public void testGetUpdateDeleteProcessingState() { stateJson.prettyPrint(); String state1 = stateJson.getString("data.state"); assertThat(state1, Matchers.equalTo(MakeDataCountProcessState.MDCProcessState.PROCESSING.name())); - String updateTimestamp1 = stateJson.getString("data.state-change-timestamp"); + String updateTimestamp1 = stateJson.getString("data.stateChangeTimestamp"); updateState = UtilIT.makeDataCountUpdateProcessingState(yearMonth, MakeDataCountProcessState.MDCProcessState.DONE.toString()); updateState.then().assertThat().statusCode(OK.getStatusCode()); stateJson = JsonPath.from(updateState.body().asString()); stateJson.prettyPrint(); String state2 = stateJson.getString("data.state"); - String updateTimestamp2 = stateJson.getString("data.state-change-timestamp"); + String updateTimestamp2 = stateJson.getString("data.stateChangeTimestamp"); assertThat(state2, Matchers.equalTo(MakeDataCountProcessState.MDCProcessState.DONE.name())); assertThat(updateTimestamp2, Matchers.is(Matchers.greaterThan(updateTimestamp1))); From 00c9807840c76b31879f0d6f725d931dbbdd0fee Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 14:41:54 -0400 Subject: [PATCH 0995/1112] Link to the guide code removed --- doc/release-notes/6.2-release-notes.md | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 8e5bf32b0e1..c1f23c1e703 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -51,6 +51,8 @@ If neither setting exists rate limiting is disabled. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." +For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#configure-your-dataverse-installation-to-use-jcache-with-hazelcast-as-provided-by-payara-for-rate-limiting). + ``` curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{ "tier": 0, @@ -104,7 +106,7 @@ A future major release of Dataverse may remove this way of configuration. Please do take the opportunity to update your SMTP configuration. Details can be found in section of the Installation Guide starting with the [SMTP/Email Configuration](https://guides.dataverse.org/en/6.2/installation/config.html#smtp-email-configuration) section of the Installation Guide. -Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.1/installation/installation-main.html#mail-host-configuration-authentication). Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. +Once reconfiguration is complete, you should remove legacy, unused config. First, run `asadmin delete-javamail-resource mail/notifyMailSession` as described in the [6.1 guides](https://guides.dataverse.org/en/6.2/installation/installation-main.html#mail-host-configuration-authentication). 
Then run `curl -X DELETE http://localhost:8080/api/admin/settings/:SystemEmail` as this database setting has been replace with `dataverse.mail.system-email` as described below. Please note: as there have been problems with email delivered to SPAM folders when the "From" within mail envelope and the mail session configuration didn't match (#4210), as of this version the sole source for the "From" address is the setting `dataverse.mail.system-email` once you migrate to the new way of configuration. @@ -282,15 +284,15 @@ Also in the context of containers, a new option to skip deployment has been adde ### Evaluation Version Tutorial on the Containers Guide -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/container +The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container ### New QA Guide -A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/develop/qa +A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa ## ⚠️ Breaking Changes https://guides.dataverse.org/en/en/develop/qa/index.html -To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/develop/api/changelog.html). +To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). ## 💻 Upgrade instructions @@ -368,7 +370,7 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta 7a\. For installations without custom or experimental metadata blocks: -- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script)) - Replace schema.xml @@ -378,7 +380,7 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta 7b\. For installations with custom or experimental metadata blocks: -- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script)) - There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): ``` @@ -393,7 +395,7 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as - Restart Solr instance (usually `service solr restart` depending on solr/OS) -8\. Run ReExportAll to update dataset metadata exports. 
Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). +8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/6.2/admin/metadataexport.html#batch-exports-through-the-api). *** @@ -428,6 +430,7 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as > - dataverse.pid.*.handlenet.key.path > - dataverse.pid.*.handlenet.key.passphrase > - dataverse.spi.pidproviders.directory +> - dataverse.solr.concurrency.max-async-indexes [⬅️ Go back](#multiple-pid-sup) From 117ee0f21f380f7b5cc12b914095dba5f0b4160a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Mar 2024 14:45:31 -0400 Subject: [PATCH 0996/1112] shorten heading, other tweaks #9356 --- .../source/installation/config.rst | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 1f56fbdb848..ff786e900cc 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1427,8 +1427,8 @@ Before being moved there, .. _cache-rate-limiting: -Configure Your Dataverse Installation to Use JCache (with Hazelcast as Provided by Payara) for Rate Limiting ------------------------------------------------------------------------------------------------------------- +Rate Limiting +------------- Rate limiting has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. @@ -1447,7 +1447,7 @@ Note: If either of these settings exist in the database rate limiting will be en - :RateLimitingCapacityByTierAndAction is a JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. -:download:`rate-limit-actions.json ` Example json for RateLimitingCapacityByTierAndAction +:download:`rate-limit-actions.json ` Example JSON for RateLimitingCapacityByTierAndAction .. code-block:: bash @@ -4714,17 +4714,22 @@ The setting is ``false`` by default, preserving the legacy behavior. Number of calls allowed per hour if the specific command is not configured. The values represent the number of calls per hour per user for tiers 0,1,... A value of -1 can be used to signify no rate limit. Also, by default, a tier not defined would receive a default of no limit. +See also :ref:`cache-rate-limiting`. + :RateLimitingCapacityByTierAndAction ++++++++++++++++++++++++++++++++++++ JSON object specifying the rate by tier and a list of actions (commands). This allows for more control over the rate limit of individual API command calls. In the following example, calls made by a guest user (tier 0) for API GetLatestPublishedDatasetVersionCommand is further limited to only 10 calls per hour, while an authenticated user (tier 1) will be able to make 30 calls per hour to the same API. 
-{"rateLimits":[ -{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, -{"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, -{"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}]} +.. code-block:: shell + {"rateLimits":[ + {"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand"]}, + {"tier": 0, "limitPerHour": 1, "actions": ["CreateGuestbookResponseCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]}, + {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand", "GetLatestPublishedDatasetVersionCommand", "GetPrivateUrlCommand", "GetDatasetCommand", "GetLatestAccessibleDatasetVersionCommand", "UpdateDatasetVersionCommand", "DestroyDatasetCommand", "DeleteDataFileCommand", "FinalizeDatasetPublicationCommand", "PublishDatasetCommand"]} + ]} +See also :ref:`cache-rate-limiting`. .. _supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html .. _password alias: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Server%20Configuration%20And%20Management/Configuration%20Options/Password%20Aliases.html From 3556555f718ae64322ac33517130f393090ae357 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 14:54:06 -0400 Subject: [PATCH 0997/1112] Header placed in the right place --- doc/release-notes/6.2-release-notes.md | 47 ++------------------------ 1 file changed, 3 insertions(+), 44 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index c1f23c1e703..f5343631b8e 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,6 +5,8 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. +## 💡Release Highlights + ### Search and Facet by License A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. @@ -13,8 +15,6 @@ Also, the Search API now handles license filtering using the `fq` parameter, for For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). -## 💡Release Highlights - ### Return to Author Now Requires a Reason The Popup for returning to author now allows to type in a message to explain the reasons of return and potential edits needed, that will be sent by email to the author. 
@@ -53,47 +53,6 @@ Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "1 For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#configure-your-dataverse-installation-to-use-jcache-with-hazelcast-as-provided-by-payara-for-rate-limiting). -``` -curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{ - "tier": 0, - "limitPerHour": 10, - "actions": [ - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand" - ] -}, -{ - "tier": 0, - "limitPerHour": 1, - "actions": [ - "CreateGuestbookResponseCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] -}, -{ - "tier": 1, - "limitPerHour": 30, - "actions": [ - "CreateGuestbookResponseCommand", - "GetLatestPublishedDatasetVersionCommand", - "GetPrivateUrlCommand", - "GetDatasetCommand", - "GetLatestAccessibleDatasetVersionCommand", - "UpdateDatasetVersionCommand", - "DestroyDatasetCommand", - "DeleteDataFileCommand", - "FinalizeDatasetPublicationCommand", - "PublishDatasetCommand" - ] -}]' -``` - Hazelcast is configured in Payara and should not need any changes for this feature ### Simplified SMTP configuration @@ -184,7 +143,7 @@ A bug introduced with the guestboook-at-request, requests are not deleted when g This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running ``` -SELECT uri, count(*) FROM externalvocabularyvaluet group by uri; +SELECT uri, count(*) FROM externalvocabularyvalue group by uri; ``` And: ``` From 92c27e7fedfbf4d8918e8f2d8704e2eb20a9ccd0 Mon Sep 17 00:00:00 2001 From: landreev Date: Wed, 27 Mar 2024 15:00:45 -0400 Subject: [PATCH 0998/1112] Update 6.2-release-notes.md changes to the upgrade instructions order, because of the breaking change in solr schema. --- doc/release-notes/6.2-release-notes.md | 49 +++++++++++++------------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 8e5bf32b0e1..bc1314d6728 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -308,31 +308,44 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa (or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) -1\. Undeploy the previous version. +1\. Usually, when a Solr schema update is released, we recommend deploying the new version of Dataverse, then updating the `schema.xml` on the solr side. With 6.2, we recommend to install the base schema first. Without it Dataverse 6.2 is not going to be able to show any results after the initial deployment. If your instance is using any custom metadata blocks, you will need to further modify the schema, see the laset step of this instruction (step 8). 
+ +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- Replace schema.xml + + - `cd /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + - `wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/schema.xml` + +- Start Solr instance (usually `service solr start`, depending on Solr/OS) + +2\. Undeploy the previous version. - `$PAYARA/bin/asadmin undeploy dataverse-6.0` -2\. Stop Payara and remove the generated directory +3\. Stop Payara and remove the generated directory - `service payara stop` - `rm -rf $PAYARA/glassfish/domains/domain1/generated` -3\. Start Payara +4\. Start Payara - `service payara start` -4\. Deploy this version. +5\. Deploy this version. - `$PAYARA/bin/asadmin deploy dataverse-6.1.war` As noted above, deployment of the war file might take several minutes due a database migration script required for the new storage quotas feature. -5\. Restart Payara +6\. Restart Payara - `service payara stop` - `service payara start` -6\. Update Geospatial Metadata Block. +7\. Update the standard Metadata Blocks: + +7a\. Update Geospatial Metadata Block. ``` wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv @@ -340,7 +353,7 @@ As noted above, deployment of the war file might take several minutes due a data curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv ``` -6a\. Update Citation Metadata Block. +7b\. Update Citation Metadata Block. ``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv @@ -348,7 +361,7 @@ wget https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv ``` -6b\. Update Astrophysics Metadata Block. +7c\. Update Astrophysics Metadata Block. ``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/astrophysics.tsv @@ -356,7 +369,7 @@ wget https://github.com/IQSS/dataverse/releases/download/v6.2/astrophysics.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/astrophysics.tsv ``` -6c\. Update Biomedical Metadata Block (to make Alternative Title repeatable) +7d\. Update Biomedical Metadata Block (to make Alternative Title repeatable) ``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv @@ -364,19 +377,7 @@ wget https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/biomedical.tsv ``` -7\. Upate Solr schema.xml. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). - -7a\. 
For installations without custom or experimental metadata blocks: - -- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) - -- Replace schema.xml - - - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` - -- Start Solr instance (usually `service solr start`, depending on Solr/OS) - -7b\. For installations with custom or experimental metadata blocks: +8\. For installations with custom or experimental metadata blocks: - Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) @@ -393,7 +394,7 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as - Restart Solr instance (usually `service solr restart` depending on solr/OS) -8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). +9\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). *** @@ -443,4 +444,4 @@ OR, alternatively, you can edit the following line in your schema.xml by hand as > - dataverse.mail.mta.allow-utf8-addresses > - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). -[⬅️ Go back](#simplified-smtp-configuration) \ No newline at end of file +[⬅️ Go back](#simplified-smtp-configuration) From 82e35b842958e7a272748c17a16bf8c3935124f7 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Wed, 27 Mar 2024 15:01:20 -0400 Subject: [PATCH 0999/1112] adding docs --- .../source/developers/make-data-count.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 8eaa5c0d7f8..d64fff9ccc7 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -88,6 +88,23 @@ To read more about the Make Data Count api, see https://github.com/datacite/sash You can compare the MDC metrics display with the Dataverse installation's original by toggling the ``:DisplayMDCMetrics`` setting (true by default to display MDC metrics). +New Make Data Count Processing for Your Dataverse Installation +-------------------------------------------------------------- + +A new script (release date TBD) will be available for processing archived Dataverse log files. Monthly logs that are zipped, TARed, and copied to an archive can be processed by this script running nightly or weekly. +The script will keep track of the state of each tar file they are processed. Through the following APIs the state of each file can be checked or modified. +Setting the state to 'Skip' will prevent the file from being processed if the developer needs to analyze the contents. +'Failed' files will be re-tried in a later run. +'Done' files are successful and will be ignored going forward. +The file(s) currently being processed will have the state 'Processing'. 
+The states are [NEW, DONE, SKIP, PROCESSING, FAILED] +The script will process the newest set of log files (merging files from multiple nodes) and calling counter_processor. +The Admin APIs to manage the states include a GET, POST, and DELETE(For Testing). +yearMonth must be in the format yyyymm or yyyymmdd +``curl -X GET http://localhost:8080/api/admin/{yearMonth}/processingState`` +``curl -X POST http://localhost:8080/api/admin/{yearMonth}/processingState?state=done`` +``curl -X DELETE http://localhost:8080/api/admin/{yearMonth}/processingState`` + Resources --------- From aeee15431df48cdb403c84fbc8bea3e2e14ea2c0 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 15:37:41 -0400 Subject: [PATCH 1000/1112] Rate limit URL change --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 83f4572f9a3..54e968684b5 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -51,7 +51,7 @@ If neither setting exists rate limiting is disabled. In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." -For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#configure-your-dataverse-installation-to-use-jcache-with-hazelcast-as-provided-by-payara-for-rate-limiting). +For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting). Hazelcast is configured in Payara and should not need any changes for this feature From dd0587159cc99f14d53b9dae98a850685d9037d3 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 15:42:50 -0400 Subject: [PATCH 1001/1112] Collapsed scripts --- doc/release-notes/6.2-release-notes.md | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 54e968684b5..cce5db596c7 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -304,35 +304,21 @@ As noted above, deployment of the war file might take several minutes due a data - `service payara stop` - `service payara start` -7\. Update the standard Metadata Blocks: - -7a\. Update Geospatial Metadata Block. +7\. Update the following Metadata Blocks: ``` wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv - ``` - -7b\. Update Citation Metadata Block. -``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv -``` -7c\. Update Astrophysics Metadata Block. - -``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/astrophysics.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/astrophysics.tsv -``` -7d\. 
Update Biomedical Metadata Block (to make Alternative Title repeatable) - -``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/biomedical.tsv From da397eac9c9bb89323928072603f5b7bdb5a3179 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 15:44:32 -0400 Subject: [PATCH 1002/1112] Incorrect URL on script --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index cce5db596c7..93a9a6b4960 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -307,7 +307,7 @@ As noted above, deployment of the war file might take several minutes due a data 7\. Update the following Metadata Blocks: ``` - wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv + wget https://github.com/IQSS/dataverse/releases/download/v6.2/geospatial.tsv curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv From 5c9cc55643ea6c50184df3c1edb2185a0d97887f Mon Sep 17 00:00:00 2001 From: landreev Date: Wed, 27 Mar 2024 15:50:08 -0400 Subject: [PATCH 1003/1112] Update 6.2-release-notes.md --- doc/release-notes/6.2-release-notes.md | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 93a9a6b4960..5350e7f80b7 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -328,16 +328,12 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta - Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script)) -- There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): +- Run the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the correct path of your solr installation): ``` wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/update-fields.sh chmod +x update-fields.sh curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml ``` -OR, alternatively, you can edit the following line in your schema.xml by hand as follows (to indicate that alternative title is now `multiValued="true"`): -``` - -``` - Restart Solr instance (usually `service solr restart` depending on solr/OS) From 51fc6654190cf9639ec8e9824be5925b104daca9 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 16:02:11 -0400 Subject: [PATCH 1004/1112] Removed the last step reExportAll, it is not required --- doc/release-notes/6.2-release-notes.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 5350e7f80b7..34c3171c3e1 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -337,8 +337,6 @@ 
curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta - Restart Solr instance (usually `service solr restart` depending on solr/OS) -9\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). - *** ## ⚙️ New Settings From 6cb9a4c76f970b5c60d30e2cb33b23af85e13911 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Mar 2024 16:09:35 -0400 Subject: [PATCH 1005/1112] doc tweaks for MDC processingState API #10424 --- doc/release-notes/10424-new-api-for-mdc.md | 14 +++----- .../source/developers/make-data-count.rst | 34 +++++++++++++------ 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/doc/release-notes/10424-new-api-for-mdc.md b/doc/release-notes/10424-new-api-for-mdc.md index 8fb1f6d9e3d..fef8ee2af22 100644 --- a/doc/release-notes/10424-new-api-for-mdc.md +++ b/doc/release-notes/10424-new-api-for-mdc.md @@ -1,11 +1,5 @@ -The API endpoint `api/admin/makeDataCount/{yearMonth}/processingState` has been added to Get, Create/Update(POST), and Delete a State for processing Make Data Count logged metrics -For Create/Update the 'state' is passed in through a query parameter. -Example -- `curl POST http://localhost:8080/api/admin/makeDataCount/2024-03/processingState?state=Skip` +(Please put at the bottom of the list under 🌐 API) -Valid values for state are [New, Done, Skip, Processing, and Failed] -'New' can be used to re-trigger the processing of the data for the year-month specified. -'Skip' will prevent the file from being processed. -'Processing' shows the state where the file is currently being processed. -'Failed' shows the state where the file has failed and will be re-processed in the next run. If you don't want the file to be re-processed set the state to 'Skip'. -'Done' is the state where the file has been successfully processed. +### Experimental Make Data Count processingState API + +An experimental Make Data Count processingState API has been added. For now it has been documented in the developer guide: https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index d64fff9ccc7..43779c35f7c 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -88,21 +88,33 @@ To read more about the Make Data Count api, see https://github.com/datacite/sash You can compare the MDC metrics display with the Dataverse installation's original by toggling the ``:DisplayMDCMetrics`` setting (true by default to display MDC metrics). -New Make Data Count Processing for Your Dataverse Installation --------------------------------------------------------------- +Processing Archived Logs +------------------------ A new script (release date TBD) will be available for processing archived Dataverse log files. Monthly logs that are zipped, TARed, and copied to an archive can be processed by this script running nightly or weekly. -The script will keep track of the state of each tar file they are processed. Through the following APIs the state of each file can be checked or modified. -Setting the state to 'Skip' will prevent the file from being processed if the developer needs to analyze the contents. -'Failed' files will be re-tried in a later run. 
-'Done' files are successful and will be ignored going forward. -The file(s) currently being processed will have the state 'Processing'. -The states are [NEW, DONE, SKIP, PROCESSING, FAILED] -The script will process the newest set of log files (merging files from multiple nodes) and calling counter_processor. -The Admin APIs to manage the states include a GET, POST, and DELETE(For Testing). -yearMonth must be in the format yyyymm or yyyymmdd + +The script will keep track of the state of each tar file they are processed and will make use of the following "processingState" API endpoints, which allow the state of each file to be checked or modified. + +The possible states are new, done, skip, processing, and failed. + +Setting the state to "skip" will prevent the file from being processed if the developer needs to analyze the contents. + +"failed" files will be re-tried in a later run. + +"done" files are successful and will be ignored going forward. + +The files currently being processed will have the state "processing". + +The script will process the newest set of log files (merging files from multiple nodes) and call Counter Processor. + +APIs to manage the states include GET, POST, and DELETE (for testing), as shown below. + +Note: ``yearMonth`` must be in the format ``yyyymm`` or ``yyyymmdd``. + ``curl -X GET http://localhost:8080/api/admin/{yearMonth}/processingState`` + ``curl -X POST http://localhost:8080/api/admin/{yearMonth}/processingState?state=done`` + ``curl -X DELETE http://localhost:8080/api/admin/{yearMonth}/processingState`` Resources From 18ac44fc5ea31010fc0e0583b3d3a9d8f3af3c32 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 16:22:12 -0400 Subject: [PATCH 1006/1112] Reindex solr change --- doc/release-notes/6.2-release-notes.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 34c3171c3e1..4017ae37b99 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -337,6 +337,14 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta - Restart Solr instance (usually `service solr restart` depending on solr/OS) +9\. Reindex Solr: + + For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command: + +``` + curl http://localhost:8080/api/admin/index +``` + *** ## ⚙️ New Settings From dff1669c82a92a7d75ce26e623bba607b299e72c Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 16:32:32 -0400 Subject: [PATCH 1007/1112] 10424 added --- doc/release-notes/10424-new-api-for-mdc.md | 5 ----- doc/release-notes/6.2-release-notes.md | 4 ++++ 2 files changed, 4 insertions(+), 5 deletions(-) delete mode 100644 doc/release-notes/10424-new-api-for-mdc.md diff --git a/doc/release-notes/10424-new-api-for-mdc.md b/doc/release-notes/10424-new-api-for-mdc.md deleted file mode 100644 index fef8ee2af22..00000000000 --- a/doc/release-notes/10424-new-api-for-mdc.md +++ /dev/null @@ -1,5 +0,0 @@ -(Please put at the bottom of the list under 🌐 API) - -### Experimental Make Data Count processingState API - -An experimental Make Data Count processingState API has been added. 
For now it has been documented in the developer guide: https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 4017ae37b99..854bb606349 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -233,6 +233,10 @@ Including the parameter and setting it to true will add a hierarchy showing whic The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. +### Experimental Make Data Count processingState API + +An experimental Make Data Count processingState API has been added. For now it has been documented in the (developer guide)[https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs]. + ## 📖 Guides ### Container Guide, Documentation for Faster Redeploy From 14460222d4a84ccfde63eca087058a005fa11cf4 Mon Sep 17 00:00:00 2001 From: kmika11 Date: Wed, 27 Mar 2024 16:37:54 -0400 Subject: [PATCH 1008/1112] Update computational_workflow.tsv Add href tag to display url in computational workflow metadata block --- .../metadatablocks/computational_workflow.tsv | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/scripts/api/data/metadatablocks/computational_workflow.tsv b/scripts/api/data/metadatablocks/computational_workflow.tsv index 51b69cfdb80..72739d35b0f 100644 --- a/scripts/api/data/metadatablocks/computational_workflow.tsv +++ b/scripts/api/data/metadatablocks/computational_workflow.tsv @@ -1,21 +1,21 @@ -#metadataBlock name dataverseAlias displayName - computationalworkflow Computational Workflow Metadata -#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI - workflowType Computational Workflow Type The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow - workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... 
url 1 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow - workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow -#controlledVocabulary DatasetField Value identifier displayOrder - workflowType Common Workflow Language (CWL) workflowtype_cwl 1 - workflowType Workflow Description Language (WDL) workflowtype_wdl 2 - workflowType Nextflow workflowtype_nextflow 3 - workflowType Snakemake workflowtype_snakemake 4 - workflowType Ruffus workflowtype_ruffus 5 - workflowType DAGMan workflowtype_dagman 6 - workflowType Jupyter Notebook workflowtype_jupyter 7 - workflowType R Notebook workflowtype_rstudio 8 - workflowType MATLAB Script workflowtype_matlab 9 - workflowType Bash Script workflowtype_bash 10 - workflowType Makefile workflowtype_makefile 11 - workflowType Other Python-based workflow workflowtype_otherpython 12 - workflowType Other R-based workflow workflowtype_otherrbased 13 - workflowType Other workflowtype_other 100 +#metadataBlock name dataverseAlias displayName + computationalworkflow Computational Workflow Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI + workflowType Computational Workflow Type "The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application" text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow + workflowCodeRepository External Code Repository URL "A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN)" https://... 
url 1 "#VALUE" FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow + workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow +#controlledVocabulary DatasetField Value identifier displayOrder + workflowType Common Workflow Language (CWL) workflowtype_cwl 1 + workflowType Workflow Description Language (WDL) workflowtype_wdl 2 + workflowType Nextflow workflowtype_nextflow 3 + workflowType Snakemake workflowtype_snakemake 4 + workflowType Ruffus workflowtype_ruffus 5 + workflowType DAGMan workflowtype_dagman 6 + workflowType Jupyter Notebook workflowtype_jupyter 7 + workflowType R Notebook workflowtype_rstudio 8 + workflowType MATLAB Script workflowtype_matlab 9 + workflowType Bash Script workflowtype_bash 10 + workflowType Makefile workflowtype_makefile 11 + workflowType Other Python-based workflow workflowtype_otherpython 12 + workflowType Other R-based workflow workflowtype_otherrbased 13 + workflowType Other workflowtype_other 100 \ No newline at end of file From bfee2f5e82967fc93152392b6436a806101fc26f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 27 Mar 2024 17:25:41 -0400 Subject: [PATCH 1009/1112] Fixed pip script --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 854bb606349..fe032d74236 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -109,7 +109,7 @@ The /api/files//uningest api also now allows users who can publish the datas Our guides now support the Markdown format with the extension **.md**. Additionally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running: ``` -pip install -r requirements.txt. +pip install -r requirements.txt ``` ## 🪲 Bug fixes From 56b2cd51e2af073ad8b478fc6a9bf256854c37db Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:36:09 -0400 Subject: [PATCH 1010/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index fe032d74236..a4a8494a49b 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -15,7 +15,7 @@ Also, the Search API now handles license filtering using the `fq` parameter, for For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). -### Return to Author Now Requires a Reason +### When Returning Datasets to Authors, Reviewers Can Add a Note to the Author The Popup for returning to author now allows to type in a message to explain the reasons of return and potential edits needed, that will be sent by email to the author. 
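For reference, the license filtering via the Search API `fq` parameter mentioned in the release-notes hunk above can be exercised as in the minimal sketch below. The facet field name `license` and the label `CC0 1.0` are assumptions; check the Search API guide and #10204 for the exact names on a given installation.

```
# Assumed example: restrict search results to CC0-licensed items
# (the "license" field name and "CC0 1.0" label are assumptions; adjust as needed)
curl "http://localhost:8080/api/search?q=*&fq=license:%22CC0+1.0%22"
```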
From 5f5b0c07817dd936c2e998910082fe33759cdc75 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:36:22 -0400 Subject: [PATCH 1011/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index a4a8494a49b..3f5f80df52f 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -25,7 +25,7 @@ Please note that this note is mandatory, but that you can still type a creative ### Support for Using Multiple PID Providers This release adds support for using multiple PID (DOI, Handle, PermaLink) providers, multiple PID provider accounts -(managing a given protocol, authority,separator, shoulder combination), assigning PID provider accounts to specific collections, +(managing a given protocol, authority, separator, shoulder combination), assigning PID provider accounts to specific collections, and supporting transferred PIDs (where a PID is managed by an account when its authority, separator, and/or shoulder don't match the combination where the account can mint new PIDs). It also adds the ability for additional provider services beyond the existing DataCite, EZId, Handle, and PermaLink providers to be dynamically added as separate jar files. From a67b7f13a996857d1a0b4f30adda269a19e45599 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:37:39 -0400 Subject: [PATCH 1012/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 3f5f80df52f..0437940ba53 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -37,7 +37,7 @@ and will be required in a future version. [New microprofile settings](#microprofile-settings) -### Rate Limiting Using JCache (With Hazelcast As Provided by Payara) +### Rate Limiting The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. 
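To illustrate the tier-based rate limiting described above, both settings are ordinary database settings that can be set with curl. A minimal sketch: the `:RateLimitingDefaultCapacityTiers` setting name is assumed here, and the JSON shape reuses the `rateLimits` structure shown for `:RateLimitingCapacityByTierAndAction` in the configuration guide excerpt earlier in this document.

```
# Assumed setting name: default per-command limits of 10,000/hour for tier 0 (guests)
# and 20,000/hour for tier 1 (authenticated users)
curl -X PUT -d "10000,20000" http://localhost:8080/api/admin/settings/:RateLimitingDefaultCapacityTiers

# Stricter limit on one command for guest users (tier 0), using the documented JSON shape
curl -X PUT http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction \
  -d '{"rateLimits":[{"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand"]}]}'
```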
From ad407c77da5ec2aacb1747c096ae81ddfdd27818 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:38:02 -0400 Subject: [PATCH 1013/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 0437940ba53..7a6859698b9 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -55,7 +55,7 @@ For more details check the detailed guide on [this link](https://guides.datavers Hazelcast is configured in Payara and should not need any changes for this feature -### Simplified SMTP configuration +### Simplified SMTP Configuration With this release, we deprecate the usage of `asadmin create-javamail-resource` to configure Dataverse to send mail using your SMTP server and provide a simplified, standard alternative using JVM options or MicroProfile Config. From 8f56d98f1bc7b38e69dacaf02ef2022969147c7e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:38:19 -0400 Subject: [PATCH 1014/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 7a6859698b9..746c2702b3b 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -91,7 +91,7 @@ Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced ### Ingested Tabular Data Files Can Be Stored Without the Variable Name Header -Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. As the most significant effect of this feature. +Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. Access API will be able to take advantage of Direct Download for tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. From ab3d9d3b0eec575fbef1935a2ec505607d970a5c Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:39:21 -0400 Subject: [PATCH 1015/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 746c2702b3b..8e04c93d479 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -93,7 +93,7 @@ Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. -Access API will be able to take advantage of Direct Download for tab. files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. 
+Access API will be able to take advantage of Direct Download for .tab files saved with these headers on S3 - since they no longer have to be generated and added to the streamed content on the fly. This behavior is controlled by the new setting **:StoreIngestedTabularFilesWithVarHeaders**. It is false by default, preserving the legacy behavior. When enabled, Dataverse will be able to handle both the newly ingested files, and any already-existing legacy files stored without these headers transparently to the user. E.g. the access API will continue delivering tab-delimited files **with** this header line, whether it needs to add it dynamically for the legacy files, or reading complete files directly from storage for the ones stored with it. From a4a9f8293a0b75e929b9443b5f0384c799067558 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:39:40 -0400 Subject: [PATCH 1016/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 8e04c93d479..c52ac13e9db 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -105,7 +105,7 @@ New Uningest/Reingest options are available in the File Page Edit menu. Ingest e The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. -### Sphinx Guides now Support Markdown Format and Tabs +### Sphinx Guides Now Support Markdown Format and Tabs Our guides now support the Markdown format with the extension **.md**. Additionally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running: ``` From 917534b44ddbe29ae68e02ea4136f76bb3894db6 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:39:56 -0400 Subject: [PATCH 1017/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index c52ac13e9db..fe633589115 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -134,7 +134,7 @@ OAI-PMH error handling has been improved to display a machine-readable error in ### Granting File Access Without Access Request -A bug introduced with the guestboook-at-request, requests are not deleted when granted, they are now given the state granted. +A bug introduced with the guestbook-at-request, requests are not deleted when granted, they are now given the state granted. 
## 💾 Persistence From 4767a35dd90a9026fa45ee3e4a4e4b7be96e59ce Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:41:11 -0400 Subject: [PATCH 1018/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index fe633589115..20f405829a8 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -187,7 +187,7 @@ This API endpoint indicates if the calling user can download at least one file f The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been extended to include the following fields: -- **allowHarvestingMissingCVV**: enable/disable allowing datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. Default is false. +- **allowHarvestingMissingCVV**: enable/disable allowing datasets to be harvested with controlled vocabulary values that exist in the originating Dataverse server but are not present in the harvesting Dataverse server. The default is false. *Note: This setting is only available to the API and not currently accessible/settable via the UI* From ef9d3876290b72edc0fee0c69c8e4a8747ebe215 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:41:23 -0400 Subject: [PATCH 1019/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 20f405829a8..09d6f70c9f2 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -189,7 +189,7 @@ The API endpoint `api/harvest/clients/{harvestingClientNickname}` has been exten - **allowHarvestingMissingCVV**: enable/disable allowing datasets to be harvested with controlled vocabulary values that exist in the originating Dataverse server but are not present in the harvesting Dataverse server. The default is false. 
-*Note: This setting is only available to the API and not currently accessible/settable via the UI* +*Note: This setting is only available to the API and not currently accessible/settable via the UI.* ### Version Files Endpoint Extended From 8a03f69447407a1304348d77d4bcfba1f4677caf Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:41:30 -0400 Subject: [PATCH 1020/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 09d6f70c9f2..3a9f1392735 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -204,7 +204,7 @@ The API endpoint `/api/metadatablocks/{block_id}` has been extended to include t - **displayOrder**: The display order of the field in create/edit forms - **typeClass**: The type class of this field ("controlledVocabulary", "compound", or "primitive") -### Get File Citation As JSON +### Get File Citation as JSON It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. From 5e35796f1eebe6af08620568c0c6f3364f4be602 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:42:12 -0400 Subject: [PATCH 1021/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 3a9f1392735..8efdec3c578 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -275,7 +275,7 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa 1\. Usually, when a Solr schema update is released, we recommend deploying the new version of Dataverse, then updating the `schema.xml` on the solr side. With 6.2, we recommend to install the base schema first. Without it Dataverse 6.2 is not going to be able to show any results after the initial deployment. If your instance is using any custom metadata blocks, you will need to further modify the schema, see the laset step of this instruction (step 8). 
-- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script)) - Replace schema.xml From 8c591c919a5c43f79250a445e02ea591c5a98fcb Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:42:37 -0400 Subject: [PATCH 1022/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 8efdec3c578..33db417c1dd 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -286,7 +286,7 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa 2\. Undeploy the previous version. -- `$PAYARA/bin/asadmin undeploy dataverse-6.0` +- `$PAYARA/bin/asadmin undeploy dataverse-6.1` 3\. Stop Payara and remove the generated directory From 1d04d024472fff294533e184894499f71dcc4d95 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:42:46 -0400 Subject: [PATCH 1023/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 33db417c1dd..914856e4d60 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -299,7 +299,7 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa 5\. Deploy this version. -- `$PAYARA/bin/asadmin deploy dataverse-6.1.war` +- `$PAYARA/bin/asadmin deploy dataverse-6.2.war` As noted above, deployment of the war file might take several minutes due a database migration script required for the new storage quotas feature. From 40b2152999695ed2a75064c7f9cf0afb39a0697c Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:43:00 -0400 Subject: [PATCH 1024/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 914856e4d60..a08a14f9403 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -343,7 +343,7 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta 9\. 
Reindex Solr: - For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command: + For details, see https://guides.dataverse.org/en/6.2/admin/solr-search-index.html but here is the reindex command: ``` curl http://localhost:8080/api/admin/index From 25ba92622fdfc8ac8d707c3924c5be0c3180c6af Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:43:08 -0400 Subject: [PATCH 1025/1112] Update 6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index a08a14f9403..1de95ecf466 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -353,7 +353,7 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta ## ⚙️ New Settings -### Microprofile settings +### MicroProfile Settings *The * indicates a provider id indicating which provider the setting is for* From 07b92f1d20e8f7505ff7c29452f4b8e81195cf7e Mon Sep 17 00:00:00 2001 From: kmika11 Date: Thu, 28 Mar 2024 08:51:33 -0400 Subject: [PATCH 1026/1112] Update computational_workflow.tsv Added href tag to "displayFormat" field in computational workflow metadata block tsv file --- .../metadatablocks/computational_workflow.tsv | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/scripts/api/data/metadatablocks/computational_workflow.tsv b/scripts/api/data/metadatablocks/computational_workflow.tsv index 72739d35b0f..17e1ec48925 100644 --- a/scripts/api/data/metadatablocks/computational_workflow.tsv +++ b/scripts/api/data/metadatablocks/computational_workflow.tsv @@ -1,21 +1,21 @@ -#metadataBlock name dataverseAlias displayName - computationalworkflow Computational Workflow Metadata -#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI - workflowType Computational Workflow Type "The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application" text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow - workflowCodeRepository External Code Repository URL "A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN)" https://... 
url 1 "#VALUE" FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow - workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow -#controlledVocabulary DatasetField Value identifier displayOrder - workflowType Common Workflow Language (CWL) workflowtype_cwl 1 - workflowType Workflow Description Language (WDL) workflowtype_wdl 2 - workflowType Nextflow workflowtype_nextflow 3 - workflowType Snakemake workflowtype_snakemake 4 - workflowType Ruffus workflowtype_ruffus 5 - workflowType DAGMan workflowtype_dagman 6 - workflowType Jupyter Notebook workflowtype_jupyter 7 - workflowType R Notebook workflowtype_rstudio 8 - workflowType MATLAB Script workflowtype_matlab 9 - workflowType Bash Script workflowtype_bash 10 - workflowType Makefile workflowtype_makefile 11 - workflowType Other Python-based workflow workflowtype_otherpython 12 - workflowType Other R-based workflow workflowtype_otherrbased 13 - workflowType Other workflowtype_other 100 \ No newline at end of file +#metadataBlock name dataverseAlias displayName + computationalworkflow Computational Workflow Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI + workflowType Computational Workflow Type The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow + workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... 
url 1 "#VALUE" FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow + workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow +#controlledVocabulary DatasetField Value identifier displayOrder + workflowType Common Workflow Language (CWL) workflowtype_cwl 1 + workflowType Workflow Description Language (WDL) workflowtype_wdl 2 + workflowType Nextflow workflowtype_nextflow 3 + workflowType Snakemake workflowtype_snakemake 4 + workflowType Ruffus workflowtype_ruffus 5 + workflowType DAGMan workflowtype_dagman 6 + workflowType Jupyter Notebook workflowtype_jupyter 7 + workflowType R Notebook workflowtype_rstudio 8 + workflowType MATLAB Script workflowtype_matlab 9 + workflowType Bash Script workflowtype_bash 10 + workflowType Makefile workflowtype_makefile 11 + workflowType Other Python-based workflow workflowtype_otherpython 12 + workflowType Other R-based workflow workflowtype_otherrbased 13 + workflowType Other workflowtype_other 100 \ No newline at end of file From 08c5b90ec6b1678bd7881b45521dc64fb3138a73 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Mar 2024 09:43:05 -0400 Subject: [PATCH 1027/1112] #10442 fix null from template --- .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index cf0b177df95..d6b3fd8c339 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1016,7 +1016,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Thu, 28 Mar 2024 10:01:28 -0400 Subject: [PATCH 1028/1112] Create 10339-workflow.md Added release notes for 10339 update --- doc/release-notes/10339-workflow.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10339-workflow.md diff --git a/doc/release-notes/10339-workflow.md b/doc/release-notes/10339-workflow.md new file mode 100644 index 00000000000..dcc9c1f9690 --- /dev/null +++ b/doc/release-notes/10339-workflow.md @@ -0,0 +1 @@ +The computational workflow metadata block has been updated to display the link for the External Code Repository URL field. 
\ No newline at end of file From a2258cd9815e438606222779aa342288d9ebfd29 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 28 Mar 2024 15:30:05 +0000 Subject: [PATCH 1029/1112] Added: onlyDisplayedOnCreate and returnDatasetFieldTypes optional query params to dataverses/{identifier}/metadatablocks API --- .../edu/harvard/iq/dataverse/Dataverse.java | 3 -- .../dataverse/MetadataBlockServiceBean.java | 17 ++++++-- .../harvard/iq/dataverse/api/Dataverses.java | 20 ++++++--- .../impl/ListMetadataBlocksCommand.java | 43 +++++++++++++------ 4 files changed, 56 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index c1de9d63410..42db9c1392a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -466,9 +466,6 @@ public void setTemplateRoot(boolean templateRoot) { this.templateRoot = templateRoot; } - - - public List getMetadataBlocks() { return getMetadataBlocks(false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index 40b52129897..c4c95fae551 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -30,7 +30,7 @@ public List listMetadataBlocks() { public List listMetadataBlocks(boolean onlyDisplayedOnCreate) { if (onlyDisplayedOnCreate) { - return listMetadataBlocksDisplayedOnCreate(); + return listMetadataBlocksDisplayedOnCreate(null); } return em.createNamedQuery("MetadataBlock.listAll", MetadataBlock.class).getResultList(); } @@ -49,13 +49,24 @@ public MetadataBlock findByName(String name) { } } - private List listMetadataBlocksDisplayedOnCreate() { + public List listMetadataBlocksDisplayedOnCreate(Dataverse ownerDataverse) { CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(MetadataBlock.class); Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class); Join datasetFieldTypeJoin = metadataBlockRoot.join("datasetFieldTypes"); Predicate displayOnCreatePredicate = criteriaBuilder.isTrue(datasetFieldTypeJoin.get("displayOnCreate")); - criteriaQuery.where(displayOnCreatePredicate); + + if (ownerDataverse != null) { + Root dataverseRoot = criteriaQuery.from(Dataverse.class); + criteriaQuery.where(criteriaBuilder.and( + criteriaBuilder.equal(dataverseRoot.get("id"), ownerDataverse.getId()), + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), + displayOnCreatePredicate + )); + } else { + criteriaQuery.where(displayOnCreatePredicate); + } + criteriaQuery.select(metadataBlockRoot).distinct(true); TypedQuery typedQuery = em.createQuery(criteriaQuery); return typedQuery.getResultList(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a1dbc3a1de6..6c105b67d77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -710,14 +710,20 @@ public Response deleteDataverseLinkingDataverse(@Context ContainerRequestContext @GET @AuthRequired @Path("{identifier}/metadatablocks") - public Response listMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + public Response listMetadataBlocks(@Context ContainerRequestContext crc, + 
@PathParam("identifier") String dvIdtf, + @QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, + @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) { try { - JsonArrayBuilder arr = Json.createArrayBuilder(); - final List blocks = execCommand(new ListMetadataBlocksCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf))); - for (MetadataBlock mdb : blocks) { - arr.add(brief.json(mdb)); - } - return ok(arr); + final List metadataBlocks = execCommand( + new ListMetadataBlocksCommand( + createDataverseRequest(getRequestUser(crc)), + findDataverseOrDie(dvIdtf), + onlyDisplayedOnCreate, + metadataBlockSvc + ) + ); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate)); } catch (WrappedResponse we) { return we.getResponse(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java index 912318cf155..f4689596160 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java @@ -2,11 +2,13 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + import java.util.Collections; import java.util.List; import java.util.Map; @@ -14,29 +16,42 @@ /** * Lists the metadata blocks of a {@link Dataverse}. - * + * * @author michael */ // no annotations here, since permissions are dynamically decided -public class ListMetadataBlocksCommand extends AbstractCommand>{ - - private final Dataverse dv; - - public ListMetadataBlocksCommand(DataverseRequest aRequest, Dataverse aDataverse) { - super(aRequest, aDataverse); - dv = aDataverse; +public class ListMetadataBlocksCommand extends AbstractCommand> { + + private final Dataverse dataverse; + private final boolean onlyDisplayedOnCreate; + private final MetadataBlockServiceBean metadataBlockService; + + public ListMetadataBlocksCommand(DataverseRequest request, Dataverse dataverse, boolean onlyDisplayedOnCreate, MetadataBlockServiceBean metadataBlockService) { + super(request, dataverse); + this.dataverse = dataverse; + this.onlyDisplayedOnCreate = onlyDisplayedOnCreate; + this.metadataBlockService = metadataBlockService; } @Override public List execute(CommandContext ctxt) throws CommandException { - return dv.getMetadataBlocks(); + if (onlyDisplayedOnCreate) { + return listMetadataBlocksDisplayedOnCreate(dataverse); + } + return dataverse.getMetadataBlocks(); } - + + private List listMetadataBlocksDisplayedOnCreate(Dataverse dataverse) { + if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) { + return metadataBlockService.listMetadataBlocksDisplayedOnCreate(dataverse); + } + return listMetadataBlocksDisplayedOnCreate(dataverse.getOwner()); + } + @Override public Map> getRequiredPermissions() { return Collections.singletonMap("", - dv.isReleased() ? Collections.emptySet() - : Collections.singleton(Permission.ViewUnpublishedDataverse)); - } - + dataverse.isReleased() ? 
Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } } From 822a1447de5cdb5d448648a49419de9bed38e6bf Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 28 Mar 2024 12:19:06 -0400 Subject: [PATCH 1030/1112] Update doc/release-notes/6.2-release-notes.md Co-authored-by: Philip Durbin --- doc/release-notes/6.2-release-notes.md | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 1de95ecf466..f2ef4e84074 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -47,13 +47,7 @@ Two database settings configure the rate limiting. Note: If either of these settings exist in the database rate limiting will be enabled. If neither setting exists rate limiting is disabled. -`:RateLimitingDefaultCapacityTiers` is a comma separated list of default values for each tier. -In the following example, the default for tier `0` (guest users) is set to 10,000 calls per command per hour and tier `1` (authenticated users) is set to 20,000 calls per command per hour. -Tiers not specified in this setting will default to `-1` (No Limit). I.e., -d "10000" is equivalent to -d "10000,-1,-1,..." - -For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting). - -Hazelcast is configured in Payara and should not need any changes for this feature +For more information check the detailed guide at [this link](https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting). ### Simplified SMTP Configuration From 4e3a024b13fe9d48949e19f1dee1e64e08265116 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 12:21:17 -0400 Subject: [PATCH 1031/1112] Rate limit --- doc/release-notes/6.2-release-notes.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index f2ef4e84074..3874cbb7b52 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -42,12 +42,11 @@ and will be required in a future version. The option to rate limit has been added to prevent users from over taxing the system either deliberately or by runaway automated processes. Rate limiting can be configured on a tier level with tier 0 being reserved for guest users and tiers 1-any for authenticated users. Superuser accounts are exempt from rate limiting. + Rate limits can be imposed on command APIs by configuring the tier, the command, and the hourly limit in the database. -Two database settings configure the rate limiting. -Note: If either of these settings exist in the database rate limiting will be enabled. -If neither setting exists rate limiting is disabled. +Two database settings configure the rate limiting **:RateLimitingDefaultCapacityTiers** and **RateLimitingCapacityByTierAndAction**, If either of these settings exist in the database rate limiting will be enabled and If neither setting exists rate limiting is disabled. -For more information check the detailed guide at [this link](https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting). +For more details check the detailed guide on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting). 
### Simplified SMTP Configuration From 4a9c9337cc0ada383a143109becbf45d72a98f52 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 13:12:46 -0400 Subject: [PATCH 1032/1112] Update with morning chages --- doc/release-notes/6.2-release-notes.md | 42 ++++++++++++++++++++------ 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 3874cbb7b52..9bec047cbe3 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,15 +5,31 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. +# Index +- [💡Release Highlights](#release-highlights) +- [🪲 Bug fixes](#-bug-fixes) +- [💾 Persistence](#-persistence) +- [🌐 API](#-api) +- [📖 Guides](#-guides) +- [⚠️ Breaking Changes](#-breaking-changes) +- [💻 Upgrade instructions](#-upgrade-instructions) +- [⚙️ New Settings](#-new-settings) + + + ## 💡Release Highlights -### Search and Facet by License +### Search and Facet by License +License have been added to the search facets in the search side panel to filter datasets by license (e.g. CC0). + +Datasets with Custom Terms are aggregated under the "Custom Terms" value of this facet. See the [Licensing](https://guides.dataverse.org/en/6.2/installation/advanced.html#licensing) section of the guide for more details on configured Licenses and Custom Terms. -A new search facet called "License" has been added and will be displayed as long as there is more than one license in datasets and datafiles in browse/search results. This facet allow you to filter by license such as CC0, etc. +For more information, see [#9060](https://github.com/IQSS/dataverse/issues/9060). -Also, the Search API now handles license filtering using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0. -For more information, see [#10204](https://github.com/IQSS/dataverse/issues/10204). +Licenses can also be used to filter the Search API results using the `fq` parameter, for example : `/api/search?q=*&fq=license%3A%22CC0+1.0%22` for CC0 1.0, see the [Search API guide](https://guides.dataverse.org/en/6.1/api/search.html) for more examples. + +For more information, see [#10204](https://github.com/IQSS/dataverse/pull/10204). ### When Returning Datasets to Authors, Reviewers Can Add a Note to the Author @@ -34,6 +50,8 @@ These changes require per-provider settings rather than the global PID settings for installations using a single PID Provider account is provided, updating to use the new microprofile settings is highly recommended and will be required in a future version. +For more information check the PID settings on [this link](https://guides.dataverse.org/en/6.2/installation/config.html#global-settings). + [New microprofile settings](#microprofile-settings) @@ -116,7 +134,7 @@ The permissions required to assign a role have been fixed. It is no longer possi ### Geospatial Metadata Block Fields for North and South Renamed -The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported on #5645. 
After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. +The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported in #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. ### OAI-PMH Error Handling Has Been Improved @@ -133,7 +151,7 @@ A bug introduced with the guestbook-at-request, requests are not deleted when gr ### Missing Database Constraints -This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running +This release adds two missing database constraints that will assure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running the following commands: ``` SELECT uri, count(*) FROM externalvocabularyvalue group by uri; @@ -149,11 +167,11 @@ Then removing any duplicate rows (where count>1). Universe field in variablemetadata table was changed from **varchar(255)** to **text**. The change was made to support longer strings in "universe" metadata field, similar to the rest of text fields in variablemetadata table. -### Postgres Versions +### PostgreSQL Versions -This release adds install script support for the new permissions model in Postgres versions 15+, and bumps FlyWay to support Postgres 16. +This release adds install script support for the new permissions model in PostgreSQL versions 15+, and bumps Flyway to support PostgreSQL 16. -Postgres 13 remains the version used with automated testing. +PostgreSQL 13 remains the version used with automated testing. ## 🌐 API @@ -166,6 +184,8 @@ Listing collection/dataverse role assignments via API still requires ManageDatav A new Index API endpoint has been added allowing an admin to clear an individual dataset from Solr. +For more information visit the documentation on [this link](https://guides.dataverse.org/en/6.2/admin/solr-search-index.html#clearing-a-dataset-from-solr) + ### New Accounts Metrics API Users can retrieve new types of metrics related to user accounts. The new capabilities are [described](https://guides.dataverse.org/en/6.2/api/metrics.html) in the guides. @@ -201,7 +221,9 @@ The API endpoint `/api/metadatablocks/{block_id}` has been extended to include t It is now possible to retrieve via API the file citation as it appears on the file landing page. It is formatted in HTML and encoded in JSON. -This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. This functionality has been requested in [#3140](https://github.com/IQSS/dataverse/issues/3140) and [#9994](https://github.com/IQSS/dataverse/issues/9994) +This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. 
+ +For mor information check the documentation on [this link](https://guides.dataverse.org/en/6.2/api/native-api.html#get-file-citation-as-json) ### Files Endpoint Extended From 4385372235e0ba085995201748ca98ae228d4b2b Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 13:14:45 -0400 Subject: [PATCH 1033/1112] Deleted url --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 9bec047cbe3..58cee103f94 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -268,7 +268,7 @@ The Container Guide now containers a tutorial for running Dataverse in container A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa -## ⚠️ Breaking Changes https://guides.dataverse.org/en/en/develop/qa/index.html +## ⚠️ Breaking Changes To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). From 452e79c710058e596b89c323e6ff60b35c9e3826 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 13:16:57 -0400 Subject: [PATCH 1034/1112] Update --- doc/release-notes/6.2-release-notes.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 58cee103f94..6e162cd6c09 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -11,9 +11,9 @@ Thank you to all of the community members who contributed code, suggestions, bug - [💾 Persistence](#-persistence) - [🌐 API](#-api) - [📖 Guides](#-guides) -- [⚠️ Breaking Changes](#-breaking-changes) +- [⚠️ Breaking Changes](#%EF%B8%8F-breaking-changes) - [💻 Upgrade instructions](#-upgrade-instructions) -- [⚙️ New Settings](#-new-settings) +- [⚙️ New Settings](#%EF%B8%8F-new-settings) From 5150f5b249163244760d714e5f9f811f42106889 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Thu, 28 Mar 2024 13:27:56 -0400 Subject: [PATCH 1035/1112] fix bad curl url in doc --- doc/sphinx-guides/source/developers/make-data-count.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 43779c35f7c..edad580e451 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -111,11 +111,11 @@ APIs to manage the states include GET, POST, and DELETE (for testing), as shown Note: ``yearMonth`` must be in the format ``yyyymm`` or ``yyyymmdd``. 
-``curl -X GET http://localhost:8080/api/admin/{yearMonth}/processingState`` +``curl -X GET http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState`` -``curl -X POST http://localhost:8080/api/admin/{yearMonth}/processingState?state=done`` +``curl -X POST http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState?state=done`` -``curl -X DELETE http://localhost:8080/api/admin/{yearMonth}/processingState`` +``curl -X DELETE http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState`` Resources --------- From b102bf7b5dbda99f194a8695149dedb0f658a331 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 13:32:25 -0400 Subject: [PATCH 1036/1112] Latest update post index --- doc/release-notes/6.2-release-notes.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 6e162cd6c09..bd584feb619 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -242,7 +242,9 @@ The endpoint supports the *includeDeaccessioned* and *returnDatasetVersion* opti The API endpoints for getting datasets, Dataverse collections, and datafiles have been extended to support the following optional 'returnOwners' query parameter. -Including the parameter and setting it to true will add a hierarchy showing which dataset and dataverse collection(s) the object is part of to the json object returned. +Including the parameter and setting it to true will add a hierarchy showing which dataset and dataverse collection(s) the object is part of to the json object returned. + +For more information visit the full native API guide on [this link](https://guides.dataverse.org/en/6.2/api/native-api.html) ### Endpoint Fixed: Datasets Metadata From 9457ea1659872754f47549d6f30ff3049250ca97 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 13:36:22 -0400 Subject: [PATCH 1037/1112] Change order of index api-breaking --- doc/release-notes/6.2-release-notes.md | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index bd584feb619..335fd606f80 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -10,8 +10,8 @@ Thank you to all of the community members who contributed code, suggestions, bug - [🪲 Bug fixes](#-bug-fixes) - [💾 Persistence](#-persistence) - [🌐 API](#-api) -- [📖 Guides](#-guides) - [⚠️ Breaking Changes](#%EF%B8%8F-breaking-changes) +- [📖 Guides](#-guides) - [💻 Upgrade instructions](#-upgrade-instructions) - [⚙️ New Settings](#%EF%B8%8F-new-settings) @@ -254,6 +254,10 @@ The API endpoint `api/datasets/{id}/metadata` has been changed to default to the An experimental Make Data Count processingState API has been added. For now it has been documented in the (developer guide)[https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs]. +## ⚠️ Breaking Changes + +To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). 
+ ## 📖 Guides ### Container Guide, Documentation for Faster Redeploy @@ -270,11 +274,6 @@ The Container Guide now containers a tutorial for running Dataverse in container A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa -## ⚠️ Breaking Changes - -To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). - - ## 💻 Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. From 4957f02744d7c01a276c26f8326fe555d91b1701 Mon Sep 17 00:00:00 2001 From: Katie Mika Date: Thu, 28 Mar 2024 14:08:46 -0400 Subject: [PATCH 1038/1112] Update doc/release-notes/10339-workflow.md Clarified that link is now clickable. Co-authored-by: Philip Durbin --- doc/release-notes/10339-workflow.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10339-workflow.md b/doc/release-notes/10339-workflow.md index dcc9c1f9690..8998f9794df 100644 --- a/doc/release-notes/10339-workflow.md +++ b/doc/release-notes/10339-workflow.md @@ -1 +1 @@ -The computational workflow metadata block has been updated to display the link for the External Code Repository URL field. \ No newline at end of file +The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. \ No newline at end of file From b46c582213e33711e6813108d8b4314e2a04c651 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 14:11:13 -0400 Subject: [PATCH 1039/1112] describe #10422 in release notes --- doc/release-notes/10381-index-after-publish.md | 3 --- doc/release-notes/6.2-release-notes.md | 5 +++++ 2 files changed, 5 insertions(+), 3 deletions(-) delete mode 100644 doc/release-notes/10381-index-after-publish.md diff --git a/doc/release-notes/10381-index-after-publish.md b/doc/release-notes/10381-index-after-publish.md deleted file mode 100644 index 84c84d75a28..00000000000 --- a/doc/release-notes/10381-index-after-publish.md +++ /dev/null @@ -1,3 +0,0 @@ -New release adds a new microprofile setting for maximum number of simultaneously running asynchronous dataset index operations that defaults to ``4``: - -dataverse.solr.concurrency.max-async-indexes \ No newline at end of file diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 335fd606f80..d7eeeebde2f 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -122,6 +122,11 @@ Our guides now support the Markdown format with the extension **.md**. Additiona ``` pip install -r requirements.txt ``` + +### Number of Concurrent Indexing Operations Now Configurable + +A new MicroProfile setting called `dataverse.solr.concurrency.max-async-indexes` has been added that controls the maximum number of simultaneously running asynchronous dataset index operations (defaults to 4). + ## 🪲 Bug fixes ### Publication Status Facet Restored From 0ab80d7f3c34f2e9005c8ec82eac7c6a29c9e301 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 14:27:42 -0400 Subject: [PATCH 1040/1112] improve guidance on writing release note snippets Please link to the HTML preview. Otherwise, it's time-consuming to find it later. 
--- doc/sphinx-guides/source/developers/version-control.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index c5669d02e77..a9a60de380c 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -117,10 +117,9 @@ As described at :ref:`write-release-notes`, at release time we compile together Here's how to add a release note snippet to your pull request: - Create a Markdown file under ``doc/release-notes``. You can reuse the name of your branch and append ".md" to it, e.g. ``3728-doc-apipolicy-fix.md`` -- Edit the snippet to include anything you think should be mentioned in the release notes, such as: +- Edit the snippet to include anything you think should be mentioned in the release notes. Please include the following if they apply: - - Descriptions of new features - - Explanations of bugs fixed + - Descriptions of new features or bug fixed, including a link to the HTML preview of the docs you wrote (e.g. https://dataverse-guide--9939.org.readthedocs.build/en/9939/installation/config.html#smtp-email-configuration ) - New configuration settings - Upgrade instructions - Etc. From aac525600b306522d7fa50c1e8201735c922c33e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 14:41:07 -0400 Subject: [PATCH 1041/1112] tweaks #10422 --- doc/release-notes/6.2-release-notes.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index d7eeeebde2f..bd72b78d3d7 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -5,12 +5,12 @@ Please note: To read these instructions in full, please go to https://github.com This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. -# Index +# Table of Contents - [💡Release Highlights](#release-highlights) - [🪲 Bug fixes](#-bug-fixes) - [💾 Persistence](#-persistence) - [🌐 API](#-api) -- [⚠️ Breaking Changes](#%EF%B8%8F-breaking-changes) +- [⚠️ Backward Incompatibilities](#%EF%B8%8F-backward-incompatibilities) - [📖 Guides](#-guides) - [💻 Upgrade instructions](#-upgrade-instructions) - [⚙️ New Settings](#%EF%B8%8F-new-settings) @@ -259,7 +259,7 @@ The API endpoint `api/datasets/{id}/metadata` has been changed to default to the An experimental Make Data Count processingState API has been added. For now it has been documented in the (developer guide)[https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs]. -## ⚠️ Breaking Changes +## ⚠️ Backward Incompatibilities To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). 
From 8c8fe789ef3f1d000756e6f3017f706ce1d7aedf Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 14:56:34 -0400 Subject: [PATCH 1042/1112] add settings, reformat --- doc/release-notes/6.2-release-notes.md | 76 ++++++++++++++------------ 1 file changed, 41 insertions(+), 35 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index bd72b78d3d7..5b77d4a78c6 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -378,45 +378,51 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta *The * indicates a provider id indicating which provider the setting is for* -> - dataverse.pid.providers -> - dataverse.pid.default-provider -> - dataverse.pid.*.type -> - dataverse.pid.*.label -> - dataverse.pid.*.authority -> - dataverse.pid.*.shoulder -> - dataverse.pid.*.identifier-generation-style -> - dataverse.pid.*.datafile-pid-format -> - dataverse.pid.*.managed-list -> - dataverse.pid.*.excluded-list -> - dataverse.pid.*.datacite.mds-api-url -> - dataverse.pid.*.datacite.rest-api-url -> - dataverse.pid.*.datacite.username -> - dataverse.pid.*.datacite.password -> - dataverse.pid.*.ezid.api-url -> - dataverse.pid.*.ezid.username -> - dataverse.pid.*.ezid.password -> - dataverse.pid.*.permalink.base-url -> - dataverse.pid.*.permalink.separator -> - dataverse.pid.*.handlenet.index -> - dataverse.pid.*.handlenet.independent-service -> - dataverse.pid.*.handlenet.auth-handle -> - dataverse.pid.*.handlenet.key.path -> - dataverse.pid.*.handlenet.key.passphrase -> - dataverse.spi.pidproviders.directory -> - dataverse.solr.concurrency.max-async-indexes +- dataverse.pid.providers +- dataverse.pid.default-provider +- dataverse.pid.*.type +- dataverse.pid.*.label +- dataverse.pid.*.authority +- dataverse.pid.*.shoulder +- dataverse.pid.*.identifier-generation-style +- dataverse.pid.*.datafile-pid-format +- dataverse.pid.*.managed-list +- dataverse.pid.*.excluded-list +- dataverse.pid.*.datacite.mds-api-url +- dataverse.pid.*.datacite.rest-api-url +- dataverse.pid.*.datacite.username +- dataverse.pid.*.datacite.password +- dataverse.pid.*.ezid.api-url +- dataverse.pid.*.ezid.username +- dataverse.pid.*.ezid.password +- dataverse.pid.*.permalink.base-url +- dataverse.pid.*.permalink.separator +- dataverse.pid.*.handlenet.index +- dataverse.pid.*.handlenet.independent-service +- dataverse.pid.*.handlenet.auth-handle +- dataverse.pid.*.handlenet.key.path +- dataverse.pid.*.handlenet.key.passphrase +- dataverse.spi.pidproviders.directory +- dataverse.solr.concurrency.max-async-indexes [⬅️ Go back](#multiple-pid-sup) ## SMTP Settings: -> - dataverse.mail.system-email -> - dataverse.mail.mta.host -> - dataverse.mail.mta.port -> - dataverse.mail.mta.ssl.enable -> - dataverse.mail.mta.auth -> - dataverse.mail.mta.user -> - dataverse.mail.mta.password -> - dataverse.mail.mta.allow-utf8-addresses -> - Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). +- dataverse.mail.system-email +- dataverse.mail.mta.host +- dataverse.mail.mta.port +- dataverse.mail.mta.ssl.enable +- dataverse.mail.mta.auth +- dataverse.mail.mta.user +- dataverse.mail.mta.password +- dataverse.mail.mta.allow-utf8-addresses +- Plus many more for advanced usage and special provider requirements. 
See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). [⬅️ Go back](#simplified-smtp-configuration) + +## Database Settings: + +- :RateLimitingDefaultCapacityTiers +- :RateLimitingCapacityByTierAndAction +- :StoreIngestedTabularFilesWithVarHeaders From 950146c23f862007cfc6c3531235888905268781 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 15:00:52 -0400 Subject: [PATCH 1043/1112] a few more sections, move settings --- doc/release-notes/6.2-release-notes.md | 119 +++++++++++++------------ 1 file changed, 63 insertions(+), 56 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 5b77d4a78c6..81db2e3a93e 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -279,6 +279,69 @@ The Container Guide now containers a tutorial for running Dataverse in container A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa +## ⚙️ New Settings + +### MicroProfile Settings + +*The * indicates a provider id indicating which provider the setting is for* + +- dataverse.pid.providers +- dataverse.pid.default-provider +- dataverse.pid.*.type +- dataverse.pid.*.label +- dataverse.pid.*.authority +- dataverse.pid.*.shoulder +- dataverse.pid.*.identifier-generation-style +- dataverse.pid.*.datafile-pid-format +- dataverse.pid.*.managed-list +- dataverse.pid.*.excluded-list +- dataverse.pid.*.datacite.mds-api-url +- dataverse.pid.*.datacite.rest-api-url +- dataverse.pid.*.datacite.username +- dataverse.pid.*.datacite.password +- dataverse.pid.*.ezid.api-url +- dataverse.pid.*.ezid.username +- dataverse.pid.*.ezid.password +- dataverse.pid.*.permalink.base-url +- dataverse.pid.*.permalink.separator +- dataverse.pid.*.handlenet.index +- dataverse.pid.*.handlenet.independent-service +- dataverse.pid.*.handlenet.auth-handle +- dataverse.pid.*.handlenet.key.path +- dataverse.pid.*.handlenet.key.passphrase +- dataverse.spi.pidproviders.directory +- dataverse.solr.concurrency.max-async-indexes + +[⬅️ Go back](#multiple-pid-sup) + +## SMTP Settings: + +- dataverse.mail.system-email +- dataverse.mail.mta.host +- dataverse.mail.mta.port +- dataverse.mail.mta.ssl.enable +- dataverse.mail.mta.auth +- dataverse.mail.mta.user +- dataverse.mail.mta.password +- dataverse.mail.mta.allow-utf8-addresses +- Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). + +[⬅️ Go back](#simplified-smtp-configuration) + +## Database Settings: + +- :RateLimitingDefaultCapacityTiers +- :RateLimitingCapacityByTierAndAction +- :StoreIngestedTabularFilesWithVarHeaders + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.2 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.2+is%3Aclosed) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. + ## 💻 Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. 
@@ -370,59 +433,3 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta curl http://localhost:8080/api/admin/index ``` -*** - -## ⚙️ New Settings - -### MicroProfile Settings - -*The * indicates a provider id indicating which provider the setting is for* - -- dataverse.pid.providers -- dataverse.pid.default-provider -- dataverse.pid.*.type -- dataverse.pid.*.label -- dataverse.pid.*.authority -- dataverse.pid.*.shoulder -- dataverse.pid.*.identifier-generation-style -- dataverse.pid.*.datafile-pid-format -- dataverse.pid.*.managed-list -- dataverse.pid.*.excluded-list -- dataverse.pid.*.datacite.mds-api-url -- dataverse.pid.*.datacite.rest-api-url -- dataverse.pid.*.datacite.username -- dataverse.pid.*.datacite.password -- dataverse.pid.*.ezid.api-url -- dataverse.pid.*.ezid.username -- dataverse.pid.*.ezid.password -- dataverse.pid.*.permalink.base-url -- dataverse.pid.*.permalink.separator -- dataverse.pid.*.handlenet.index -- dataverse.pid.*.handlenet.independent-service -- dataverse.pid.*.handlenet.auth-handle -- dataverse.pid.*.handlenet.key.path -- dataverse.pid.*.handlenet.key.passphrase -- dataverse.spi.pidproviders.directory -- dataverse.solr.concurrency.max-async-indexes - -[⬅️ Go back](#multiple-pid-sup) - -## SMTP Settings: - -- dataverse.mail.system-email -- dataverse.mail.mta.host -- dataverse.mail.mta.port -- dataverse.mail.mta.ssl.enable -- dataverse.mail.mta.auth -- dataverse.mail.mta.user -- dataverse.mail.mta.password -- dataverse.mail.mta.allow-utf8-addresses -- Plus many more for advanced usage and special provider requirements. See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). - -[⬅️ Go back](#simplified-smtp-configuration) - -## Database Settings: - -- :RateLimitingDefaultCapacityTiers -- :RateLimitingCapacityByTierAndAction -- :StoreIngestedTabularFilesWithVarHeaders From bfd94074018d7dd85b1c5b6a0632ab5ec5311ab8 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 15:12:09 -0400 Subject: [PATCH 1044/1112] Table of contets reorg --- doc/release-notes/6.2-release-notes.md | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 81db2e3a93e..6becea8d05d 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -12,10 +12,10 @@ Thank you to all of the community members who contributed code, suggestions, bug - [🌐 API](#-api) - [⚠️ Backward Incompatibilities](#%EF%B8%8F-backward-incompatibilities) - [📖 Guides](#-guides) -- [💻 Upgrade instructions](#-upgrade-instructions) - [⚙️ New Settings](#%EF%B8%8F-new-settings) - - +- [📋 Complete List of Changes](#-upgrade-instructions) +- [🛟 Getting Help](#-upgrade-instructions) +- [💻 Upgrade instructions](#-upgrade-instructions) ## 💡Release Highlights @@ -312,9 +312,7 @@ A new QA Guide is intended mostly for the core development team but may be of in - dataverse.spi.pidproviders.directory - dataverse.solr.concurrency.max-async-indexes -[⬅️ Go back](#multiple-pid-sup) - -## SMTP Settings: +### SMTP Settings: - dataverse.mail.system-email - dataverse.mail.mta.host @@ -326,19 +324,17 @@ A new QA Guide is intended mostly for the core development team but may be of in - dataverse.mail.mta.allow-utf8-addresses - Plus many more for advanced usage and special provider requirements. 
See [configuration guide for a full list](https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-mail-mta). -[⬅️ Go back](#simplified-smtp-configuration) - -## Database Settings: +### Database Settings: - :RateLimitingDefaultCapacityTiers - :RateLimitingCapacityByTierAndAction - :StoreIngestedTabularFilesWithVarHeaders -## Complete List of Changes +## 📋 Complete List of Changes For the complete list of code changes in this release, see the [6.2 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.2+is%3Aclosed) in GitHub. -## Getting Help +## 🛟 Getting Help For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. From 5f7c99f94ad5f9888dcdccf3fa4a43c80ffe9640 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 15:18:07 -0400 Subject: [PATCH 1045/1112] Table of contents final --- doc/release-notes/6.2-release-notes.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 6becea8d05d..d172ba48c29 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -13,8 +13,8 @@ Thank you to all of the community members who contributed code, suggestions, bug - [⚠️ Backward Incompatibilities](#%EF%B8%8F-backward-incompatibilities) - [📖 Guides](#-guides) - [⚙️ New Settings](#%EF%B8%8F-new-settings) -- [📋 Complete List of Changes](#-upgrade-instructions) -- [🛟 Getting Help](#-upgrade-instructions) +- [📋 Complete List of Changes](#-complete-list-of-changes) +- [🛟 Getting Help](#-getting-help) - [💻 Upgrade instructions](#-upgrade-instructions) ## 💡Release Highlights @@ -127,6 +127,8 @@ pip install -r requirements.txt A new MicroProfile setting called `dataverse.solr.concurrency.max-async-indexes` has been added that controls the maximum number of simultaneously running asynchronous dataset index operations (defaults to 4). +[⬆️](#-table-of-contents) + ## 🪲 Bug fixes ### Publication Status Facet Restored @@ -152,6 +154,8 @@ OAI-PMH error handling has been improved to display a machine-readable error in A bug introduced with the guestbook-at-request, requests are not deleted when granted, they are now given the state granted. +[⬆️](#-table-of-contents) + ## 💾 Persistence ### Missing Database Constraints @@ -259,10 +263,14 @@ The API endpoint `api/datasets/{id}/metadata` has been changed to default to the An experimental Make Data Count processingState API has been added. For now it has been documented in the (developer guide)[https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs]. +[⬆️](#-table-of-contents) + ## ⚠️ Backward Incompatibilities To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). 
+[⬆️](#-table-of-contents) + ## 📖 Guides ### Container Guide, Documentation for Faster Redeploy @@ -279,6 +287,8 @@ The Container Guide now containers a tutorial for running Dataverse in container A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa +[⬆️](#-table-of-contents) + ## ⚙️ New Settings ### MicroProfile Settings @@ -334,10 +344,14 @@ A new QA Guide is intended mostly for the core development team but may be of in For the complete list of code changes in this release, see the [6.2 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.2+is%3Aclosed) in GitHub. +[⬆️](#-table-of-contents) + ## 🛟 Getting Help For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. +[⬆️](#-table-of-contents) + ## 💻 Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. @@ -428,4 +442,4 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta ``` curl http://localhost:8080/api/admin/index ``` - +[⬆️](#-table-of-contents) From f82ebdaaa27f3e183a5c9f43fa100eacf20e2a9d Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 15:20:08 -0400 Subject: [PATCH 1046/1112] TOC fix --- doc/release-notes/6.2-release-notes.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index d172ba48c29..bbf8d010ebc 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -127,7 +127,7 @@ pip install -r requirements.txt A new MicroProfile setting called `dataverse.solr.concurrency.max-async-indexes` has been added that controls the maximum number of simultaneously running asynchronous dataset index operations (defaults to 4). -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## 🪲 Bug fixes @@ -154,7 +154,7 @@ OAI-PMH error handling has been improved to display a machine-readable error in A bug introduced with the guestbook-at-request, requests are not deleted when granted, they are now given the state granted. -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## 💾 Persistence @@ -263,13 +263,13 @@ The API endpoint `api/datasets/{id}/metadata` has been changed to default to the An experimental Make Data Count processingState API has been added. For now it has been documented in the (developer guide)[https://guides.dataverse.org/en/6.2/developers/make-data-count.html#processing-archived-logs]. -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## ⚠️ Backward Incompatibilities To view a list of changes that can be impactful to your implementation please visit our detailed [list of changes to the API](https://guides.dataverse.org/en/6.2/develop/api/changelog.html). 
-[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## 📖 Guides @@ -287,7 +287,7 @@ The Container Guide now containers a tutorial for running Dataverse in container A new QA Guide is intended mostly for the core development team but may be of interest to contributors on: https://guides.dataverse.org/en/6.2/develop/qa -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## ⚙️ New Settings @@ -344,13 +344,13 @@ A new QA Guide is intended mostly for the core development team but may be of in For the complete list of code changes in this release, see the [6.2 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.2+is%3Aclosed) in GitHub. -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## 🛟 Getting Help For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) ## 💻 Upgrade instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. @@ -442,4 +442,4 @@ curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/ta ``` curl http://localhost:8080/api/admin/index ``` -[⬆️](#-table-of-contents) +[⬆️](#table-of-contents) From 2035585303925b2cc8ef0fc4d74bc5aee636d67b Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 15:22:44 -0400 Subject: [PATCH 1047/1112] toc fix 2 --- doc/release-notes/6.2-release-notes.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index bbf8d010ebc..494bab39553 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -182,6 +182,7 @@ This release adds install script support for the new permissions model in Postgr PostgreSQL 13 remains the version used with automated testing. +[⬆️](#table-of-contents) ## 🌐 API From 8bbe9744e8945a3318a61f3a23686950d6ca1536 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 15:32:31 -0400 Subject: [PATCH 1048/1112] more links, tweaks --- doc/release-notes/6.2-release-notes.md | 28 +++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 494bab39553..3c2a116ac7a 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -37,6 +37,8 @@ The Popup for returning to author now allows to type in a message to explain the Please note that this note is mandatory, but that you can still type a creative and meaningful comment such as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. +For more information, see #10137. + ### Support for Using Multiple PID Providers @@ -92,14 +94,18 @@ For more information, see [#10360](https://github.com/IQSS/dataverse/issues/1036 When a Dataverse installation is configured to use a metadata exporter for the [Croissant](https://github.com/mlcommons/croissant) format, the content of the JSON-LD in the **<head>** of dataset landing pages will be replaced with that format. However, both JSON-LD and Croissant will still be available for download from the dataset page and API. +For more information, see #10382. 
+ ### Harvesting Handle Missing Controlled Values -Allows datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. For mor information view the changes to the endpoint [here](#harvesting-client-endpoint-extended). +Allows datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. For more information, view the changes to the endpoint [here](#harvesting-client-endpoint-extended). ### Add .QPJ and .QMD Extensions to Shapefile Handling Support for **.qpj** and **.qmd** files in shapefile uploads has been introduced, ensuring that these files are properly recognized and handled as part of geospatial datasets in Dataverse. +For more information, see #10305. + ### Ingested Tabular Data Files Can Be Stored Without the Variable Name Header Tabular Data Ingest can now save the generated archival files with the list of variable names added as the first tab-delimited line. @@ -110,12 +116,16 @@ This behavior is controlled by the new setting **:StoreIngestedTabularFilesWithV We are planning to add an API for converting existing legacy tabular files in a future release. +For more information, see #10282. + ### Uningest/Reingest Options Available in the File Page Edit Menu New Uningest/Reingest options are available in the File Page Edit menu. Ingest errors can be cleared by users who can published the associated dataset and by superusers, allowing for a successful ingest to be undone or retried (e.g. after a Dataverse version update or if ingest size limits are changed). The /api/files//uningest api also now allows users who can publish the dataset to undo an ingest failure. +For more information, see #10319. + ### Sphinx Guides Now Support Markdown Format and Tabs Our guides now support the Markdown format with the extension **.md**. Additionally, an option to create tabs in the guides using [Sphinx Tabs](https://sphinx-tabs.readthedocs.io) has been added. (You can see the tabs in action in the "dev usage" page of the Container Guide.) To continue building the guides, you will need to install this new dependency by re-running: @@ -123,10 +133,14 @@ Our guides now support the Markdown format with the extension **.md**. Additiona pip install -r requirements.txt ``` +For more information, see #10111. + ### Number of Concurrent Indexing Operations Now Configurable A new MicroProfile setting called `dataverse.solr.concurrency.max-async-indexes` has been added that controls the maximum number of simultaneously running asynchronous dataset index operations (defaults to 4). +For more information, see #10388. + [⬆️](#table-of-contents) ## 🪲 Bug fixes @@ -141,14 +155,14 @@ The permissions required to assign a role have been fixed. It is no longer possi ### Geospatial Metadata Block Fields for North and South Renamed -The Geospatial metadata block fields for north and south were labeled incorrectly as ‘Longitudes,’ as reported in #5645. After updating to this version of Dataverse, users will need to update all the endpoints that used ‘northLongitude’ and ‘southLongitude’ to ‘northLatitude’ and ‘southLatitude,’ respectively. +The Geospatial metadata block fields for north and south were labeled incorrectly as longitudes, as reported in #5645. 
After updating to this version of Dataverse, users will need to update any API client code used "northLongitude" and "southLongitude" to "northLatitude" and "southLatitude", respectively, as [mentioned](https://groups.google.com/g/dataverse-community/c/5qpOIZUSL6A/m/nlYGEXkYAAAJ) on the mailing list. ### OAI-PMH Error Handling Has Been Improved OAI-PMH error handling has been improved to display a machine-readable error in XML rather than a 500 error with no further information. -> - /oai?foo=bar will show "No argument 'verb' found" -> - /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" +- /oai?foo=bar will show "No argument 'verb' found" +- /oai?verb=foo&verb=bar will show "Verb must be singular, given: '[foo, bar]'" ### Granting File Access Without Access Request @@ -233,7 +247,7 @@ It is now possible to retrieve via API the file citation as it appears on the fi This API is not for downloading various citation formats such as EndNote XML, RIS, or BibTeX. -For mor information check the documentation on [this link](https://guides.dataverse.org/en/6.2/api/native-api.html#get-file-citation-as-json) +For more information check the documentation on [this link](https://guides.dataverse.org/en/6.2/api/native-api.html#get-file-citation-as-json) ### Files Endpoint Extended @@ -282,7 +296,7 @@ Also in the context of containers, a new option to skip deployment has been adde ### Evaluation Version Tutorial on the Containers Guide -The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container +The Container Guide now containers a tutorial for running Dataverse in containers for demo or evaluation purposes: https://guides.dataverse.org/en/6.2/container/running/demo.html ### New QA Guide @@ -353,7 +367,7 @@ For help with upgrading, installing, or general questions please post to the [Da [⬆️](#table-of-contents) -## 💻 Upgrade instructions +## 💻 Upgrade Instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.1. From c67b7679c4db714abae0adf31348a232bca2e780 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 28 Mar 2024 15:49:24 -0400 Subject: [PATCH 1049/1112] ask for the issue number and maybe PR number --- doc/sphinx-guides/source/developers/version-control.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index a9a60de380c..07922b56b86 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -119,7 +119,7 @@ Here's how to add a release note snippet to your pull request: - Create a Markdown file under ``doc/release-notes``. You can reuse the name of your branch and append ".md" to it, e.g. ``3728-doc-apipolicy-fix.md`` - Edit the snippet to include anything you think should be mentioned in the release notes. Please include the following if they apply: - - Descriptions of new features or bug fixed, including a link to the HTML preview of the docs you wrote (e.g. https://dataverse-guide--9939.org.readthedocs.build/en/9939/installation/config.html#smtp-email-configuration ) + - Descriptions of new features or bug fixed, including a link to the HTML preview of the docs you wrote (e.g. 
https://dataverse-guide--9939.org.readthedocs.build/en/9939/installation/config.html#smtp-email-configuration ) and the phrase "For more information, see #3728" (the issue number). If you know the PR number, you can add that too. - New configuration settings - Upgrade instructions - Etc. From ad91b5b62b8aef2adffad15166a565acac83273e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 16:32:34 -0400 Subject: [PATCH 1050/1112] Geospatial command fix --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 3c2a116ac7a..143215f1bdb 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -422,7 +422,7 @@ As noted above, deployment of the war file might take several minutes due a data ``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/geospatial.tsv - curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file @geospatial.tsv + curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/geospatial.tsv wget https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv From ade981e2da11b1be68ddbf6ed0c1045f26e107d5 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 28 Mar 2024 17:06:24 -0400 Subject: [PATCH 1051/1112] Geospatial tooltip updated --- doc/release-notes/10397-geospatial-tooltip-fix.md | 2 ++ scripts/api/data/metadatablocks/geospatial.tsv | 8 ++++---- src/main/java/propertyFiles/geospatial.properties | 8 ++++---- 3 files changed, 10 insertions(+), 8 deletions(-) create mode 100644 doc/release-notes/10397-geospatial-tooltip-fix.md diff --git a/doc/release-notes/10397-geospatial-tooltip-fix.md b/doc/release-notes/10397-geospatial-tooltip-fix.md new file mode 100644 index 00000000000..f3c707ed00a --- /dev/null +++ b/doc/release-notes/10397-geospatial-tooltip-fix.md @@ -0,0 +1,2 @@ +We have updated the tooltip for the Geospatial metadata where previously the use of comas was incorrectly suggested. + diff --git a/scripts/api/data/metadatablocks/geospatial.tsv b/scripts/api/data/metadatablocks/geospatial.tsv index 09d19c608e5..11408317410 100644 --- a/scripts/api/data/metadatablocks/geospatial.tsv +++ b/scripts/api/data/metadatablocks/geospatial.tsv @@ -8,10 +8,10 @@ otherGeographicCoverage Other Other information on the geographic coverage of the data. text 4 #VALUE, FALSE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial geographicUnit Geographic Unit Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. text 5 TRUE FALSE TRUE TRUE FALSE FALSE geospatial geographicBoundingBox Geographic Bounding Box The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. none 6 FALSE FALSE TRUE FALSE FALSE FALSE geospatial - westLongitude Westernmost (Left) Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. 
A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial - eastLongitude Easternmost (Right) Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial - northLatitude Northernmost (Top) Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial - southLatitude Southernmost (Bottom) Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + westLongitude Westernmost (Left) Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= West Bounding Longitude Value <= 180.0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + eastLongitude Easternmost (Right) Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= East Bounding Longitude Value <= 180.0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + northLatitude Northernmost (Top) Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= North Bounding Latitude Value <= 90.0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + southLatitude Southernmost (Bottom) Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= South Bounding Latitude Value <= 90.0. text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial #controlledVocabulary DatasetField Value identifier displayOrder country Afghanistan 0 country Albania 1 diff --git a/src/main/java/propertyFiles/geospatial.properties b/src/main/java/propertyFiles/geospatial.properties index ce258071c27..2659c2a3cc9 100644 --- a/src/main/java/propertyFiles/geospatial.properties +++ b/src/main/java/propertyFiles/geospatial.properties @@ -19,10 +19,10 @@ datasetfieldtype.city.description=The name of the city that the Dataset is about datasetfieldtype.otherGeographicCoverage.description=Other information on the geographic coverage of the data. datasetfieldtype.geographicUnit.description=Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. datasetfieldtype.geographicBoundingBox.description=The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. 
-datasetfieldtype.westLongitude.description=Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. -datasetfieldtype.eastLongitude.description=Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. -datasetfieldtype.northLatitude.description=Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. -datasetfieldtype.southLatitude.description=Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. +datasetfieldtype.westLongitude.description=Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= West Bounding Longitude Value <= 180.0. +datasetfieldtype.eastLongitude.description=Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= East Bounding Longitude Value <= 180.0. +datasetfieldtype.northLatitude.description=Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= North Bounding Latitude Value <= 90.0. +datasetfieldtype.southLatitude.description=Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= South Bounding Latitude Value <= 90.0. datasetfieldtype.geographicCoverage.watermark= datasetfieldtype.country.watermark= datasetfieldtype.state.watermark= From 6810b593f8e5c346f2af3cbf541c7275eb2affee Mon Sep 17 00:00:00 2001 From: landreev Date: Thu, 28 Mar 2024 17:16:45 -0400 Subject: [PATCH 1052/1112] Update 10397-geospatial-tooltip-fix.md --- doc/release-notes/10397-geospatial-tooltip-fix.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10397-geospatial-tooltip-fix.md b/doc/release-notes/10397-geospatial-tooltip-fix.md index f3c707ed00a..cfe2ee283ed 100644 --- a/doc/release-notes/10397-geospatial-tooltip-fix.md +++ b/doc/release-notes/10397-geospatial-tooltip-fix.md @@ -1,2 +1,2 @@ -We have updated the tooltip for the Geospatial metadata where previously the use of comas was incorrectly suggested. +We have updated the tooltips in the Geospatial metadata block, where the use of comas instead of dots in coordinate values was incorrectly suggested. From 25912c6ce08ea62a78d4a056f9e79c1f2f18b29a Mon Sep 17 00:00:00 2001 From: landreev Date: Thu, 28 Mar 2024 17:50:40 -0400 Subject: [PATCH 1053/1112] Update 10397-geospatial-tooltip-fix.md --- doc/release-notes/10397-geospatial-tooltip-fix.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10397-geospatial-tooltip-fix.md b/doc/release-notes/10397-geospatial-tooltip-fix.md index cfe2ee283ed..0774dc1860e 100644 --- a/doc/release-notes/10397-geospatial-tooltip-fix.md +++ b/doc/release-notes/10397-geospatial-tooltip-fix.md @@ -1,2 +1,2 @@ -We have updated the tooltips in the Geospatial metadata block, where the use of comas instead of dots in coordinate values was incorrectly suggested. 
+We have updated the tooltips in the Geospatial metadata block, where the use of commas instead of dots in coordinate values was incorrectly suggested. From c4fce44b0a16649645d7d201066de2a1553a7aa1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 08:31:48 +0000 Subject: [PATCH 1054/1112] Added: IT for Dataverses API listMetadataBlocks --- .../iq/dataverse/api/DataversesIT.java | 53 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 8 +++ 2 files changed, 60 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3330d11435a..eafff5d2d4a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -29,6 +29,7 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; @@ -691,5 +692,55 @@ public void testAttributesApi() throws Exception { deleteCollectionResponse.prettyPrint(); assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); } - + + @Test + public void testListMetadataBlocks() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); + setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + + // Dataverse not found + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks("-1", false, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Existent Dataverse and no optional params + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", equalTo(null)) + .body("data.size()", equalTo(2)); + + // Existent Dataverse and onlyDisplayedOnCreate=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", equalTo(null)) + .body("data[0].displayName", equalTo("Citation Metadata")) + .body("data.size()", equalTo(1)); + + // Existent Dataverse and returnDatasetFieldTypes=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, true, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", not(equalTo(null))) + .body("data.size()", equalTo(2)); + + // Existent Dataverse and onlyDisplayedOnCreate=true and 
returnDatasetFieldTypes=true + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocksResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fields", not(equalTo(null))) + .body("data[0].displayName", equalTo("Citation Metadata")) + .body("data.size()", equalTo(1)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 2f94bd714c0..575c5df1aa3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -647,6 +647,14 @@ static Response setMetadataBlocks(String dataverseAlias, JsonArrayBuilder blocks .post("/api/dataverses/" + dataverseAlias + "/metadatablocks"); } + static Response listMetadataBlocks(String dataverseAlias, boolean onlyDisplayedOnCreate, boolean returnDatasetFieldTypes, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("onlyDisplayedOnCreate", onlyDisplayedOnCreate) + .queryParam("returnDatasetFieldTypes", returnDatasetFieldTypes) + .get("/api/dataverses/" + dataverseAlias + "/metadatablocks"); + } + static Response listMetadataBlocks(boolean onlyDisplayedOnCreate, boolean returnDatasetFieldTypes) { return given() .queryParam("onlyDisplayedOnCreate", onlyDisplayedOnCreate) From b405be091fc3a2ffbcb3293bfe07da17f36a557d Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 08:34:21 +0000 Subject: [PATCH 1055/1112] Changed: renamed variable in IT --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index eafff5d2d4a..0c21571a06b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -702,8 +702,8 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); - setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); + setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); // Dataverse not found Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks("-1", false, false, apiToken); From 6d1c4f013025bb866ae5ab6422d11853cfd05540 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 10:03:38 +0000 Subject: [PATCH 1056/1112] Added: unauthorized test case to DataversesIT listMetadataBlocks --- .../iq/dataverse/api/DataversesIT.java | 27 ++++++++++++------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 0c21571a06b..d7f7ff39cbd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -18,16 +18,12 @@ import 
jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; import jakarta.ws.rs.core.Response.Status; -import static jakarta.ws.rs.core.Response.Status.OK; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; -import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; -import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; -import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; + +import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -709,7 +705,7 @@ public void testListMetadataBlocks() { Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks("-1", false, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); - // Existent Dataverse and no optional params + // Existent dataverse and no optional params listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() @@ -717,7 +713,7 @@ public void testListMetadataBlocks() { .body("data[0].fields", equalTo(null)) .body("data.size()", equalTo(2)); - // Existent Dataverse and onlyDisplayedOnCreate=true + // Existent dataverse and onlyDisplayedOnCreate=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, false, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() @@ -726,7 +722,7 @@ public void testListMetadataBlocks() { .body("data[0].displayName", equalTo("Citation Metadata")) .body("data.size()", equalTo(1)); - // Existent Dataverse and returnDatasetFieldTypes=true + // Existent dataverse and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, true, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() @@ -734,7 +730,7 @@ public void testListMetadataBlocks() { .body("data[0].fields", not(equalTo(null))) .body("data.size()", equalTo(2)); - // Existent Dataverse and onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true + // Existent dataverse and onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() @@ -742,5 +738,16 @@ public void testListMetadataBlocks() { .body("data[0].fields", not(equalTo(null))) .body("data[0].displayName", equalTo("Citation Metadata")) .body("data.size()", equalTo(1)); + + // User has no permissions on the requested dataverse + Response createSecondUserResponse = UtilIT.createRandomUser(); + String secondApiToken = UtilIT.getApiTokenFromResponse(createSecondUserResponse); + + createDataverseResponse = UtilIT.createRandomDataverse(secondApiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String secondDataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + listMetadataBlocksResponse = 
UtilIT.listMetadataBlocks(secondDataverseAlias, true, true, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); } } From 5a1ca07a61146ae99f68078a7575ffafbaef8d6c Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 10:09:07 +0000 Subject: [PATCH 1057/1112] Added: docs for new optional params in dataverses//metadatablocks API endpoint --- doc/sphinx-guides/source/api/native-api.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index d57962c1ce7..1cba832b474 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -447,6 +447,27 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks" +This endpoint supports the following optional query parameters: + +- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. +- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. + +An example using the optional query parameters is presented below: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + Define Metadata Blocks for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From ee14b2686acc886924f3758c583edcbad1929aa5 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 29 Mar 2024 10:04:52 -0400 Subject: [PATCH 1058/1112] #10422 incorporate tooltip fix --- doc/release-notes/10397-geospatial-tooltip-fix.md | 2 -- doc/release-notes/6.2-release-notes.md | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 doc/release-notes/10397-geospatial-tooltip-fix.md diff --git a/doc/release-notes/10397-geospatial-tooltip-fix.md b/doc/release-notes/10397-geospatial-tooltip-fix.md deleted file mode 100644 index 0774dc1860e..00000000000 --- a/doc/release-notes/10397-geospatial-tooltip-fix.md +++ /dev/null @@ -1,2 +0,0 @@ -We have updated the tooltips in the Geospatial metadata block, where the use of commas instead of dots in coordinate values was incorrectly suggested. - diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 143215f1bdb..cc4b03bbc20 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -156,6 +156,7 @@ The permissions required to assign a role have been fixed. 
It is no longer possi ### Geospatial Metadata Block Fields for North and South Renamed The Geospatial metadata block fields for north and south were labeled incorrectly as longitudes, as reported in #5645. After updating to this version of Dataverse, users will need to update any API client code used "northLongitude" and "southLongitude" to "northLatitude" and "southLatitude", respectively, as [mentioned](https://groups.google.com/g/dataverse-community/c/5qpOIZUSL6A/m/nlYGEXkYAAAJ) on the mailing list. +Also, we have updated the tooltips in the Geospatial metadata block, where the use of commas instead of dots in coordinate values was incorrectly suggested. ### OAI-PMH Error Handling Has Been Improved From bd333910adbeccd98d372e19b526674a688ae76d Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 29 Mar 2024 10:13:29 -0400 Subject: [PATCH 1059/1112] #10442 add note for harvest redirects --- doc/release-notes/10254-fix-harvested-redirects.md | 1 - doc/release-notes/6.2-release-notes.md | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) delete mode 100644 doc/release-notes/10254-fix-harvested-redirects.md diff --git a/doc/release-notes/10254-fix-harvested-redirects.md b/doc/release-notes/10254-fix-harvested-redirects.md deleted file mode 100644 index 02ee5ddaf4d..00000000000 --- a/doc/release-notes/10254-fix-harvested-redirects.md +++ /dev/null @@ -1 +0,0 @@ -Redirects from search cards back to the original source for datasets harvested from "Generic OAI Archives", i.e. non-Dataverse OAI servers, have been fixed. diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index cc4b03bbc20..b14f769f20b 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -169,6 +169,10 @@ OAI-PMH error handling has been improved to display a machine-readable error in A bug introduced with the guestbook-at-request, requests are not deleted when granted, they are now given the state granted. +### Harvesting redirects fixed + +Redirects from search cards back to the original source for datasets harvested from "Generic OAI Archives", i.e. non-Dataverse OAI servers, have been fixed. + [⬆️](#table-of-contents) ## 💾 Persistence From 4fe90539fcba40263c2658216b13bd7afd4a3a48 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 29 Mar 2024 10:56:52 -0400 Subject: [PATCH 1060/1112] add order to test --- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 4466182b435..970545376d7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -6,9 +6,7 @@ import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.*; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; @@ -27,6 +25,7 @@ * /api/harvest/clients/ api to run an actual harvest of a control set and * then validate the resulting harvested content. 
*/ +@TestMethodOrder(MethodOrderer.MethodName.class) public class HarvestingClientsIT { private static final Logger logger = Logger.getLogger(HarvestingClientsIT.class.getCanonicalName()); From 9d11921f86a23b646972b1ef6775c8cfa63fb612 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 29 Mar 2024 11:08:52 -0400 Subject: [PATCH 1061/1112] #10422 fix wording of note --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index b14f769f20b..5ca03e3231c 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -98,7 +98,7 @@ For more information, see #10382. ### Harvesting Handle Missing Controlled Values -Allows datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. For more information, view the changes to the endpoint [here](#harvesting-client-endpoint-extended). +Allows datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse installation but are not in the harvesting Dataverse installation. For more information, view the changes to the endpoint [here](#harvesting-client-endpoint-extended). ### Add .QPJ and .QMD Extensions to Shapefile Handling From 30356fd740a5d6ca09b528939ee28bab429914c5 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 29 Mar 2024 12:07:53 -0400 Subject: [PATCH 1062/1112] add order to test --- .../iq/dataverse/api/HarvestingClientsIT.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 970545376d7..3d630adbf41 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -25,7 +25,7 @@ * /api/harvest/clients/ api to run an actual harvest of a control set and * then validate the resulting harvested content. */ -@TestMethodOrder(MethodOrderer.MethodName.class) +@TestMethodOrder(MethodOrderer.OrderAnnotation.class) public class HarvestingClientsIT { private static final Logger logger = Logger.getLogger(HarvestingClientsIT.class.getCanonicalName()); @@ -87,6 +87,7 @@ private static void setupCollection() { } @Test + @Order(1) public void testCreateEditDeleteClient() throws InterruptedException { // This method focuses on testing the native Dataverse harvesting client // API. @@ -169,13 +170,15 @@ public void testCreateEditDeleteClient() throws InterruptedException { } @Test - public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { - harvestingClientRun(false); - } - @Test + @Order(2) public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { harvestingClientRun(true); } + @Test + @Order(3) + public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { + harvestingClientRun(false); + } private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { int expectedNumberOfSetsHarvested = allowHarvestingMissingCVV ? 
DATASETS_IN_CONTROL_SET : DATASETS_IN_CONTROL_SET - 1; From 934bdf128eed12eb2dc1be56ee1b0b9674e39bca Mon Sep 17 00:00:00 2001 From: landreev Date: Fri, 29 Mar 2024 12:20:37 -0400 Subject: [PATCH 1063/1112] Update 6.2-release-notes.md --- doc/release-notes/6.2-release-notes.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 5ca03e3231c..2b95568105a 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -387,14 +387,14 @@ In the following commands we assume that Payara 6 is installed in `/usr/local/pa (or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) -1\. Usually, when a Solr schema update is released, we recommend deploying the new version of Dataverse, then updating the `schema.xml` on the solr side. With 6.2, we recommend to install the base schema first. Without it Dataverse 6.2 is not going to be able to show any results after the initial deployment. If your instance is using any custom metadata blocks, you will need to further modify the schema, see the laset step of this instruction (step 8). +1\. Usually, when a Solr schema update is released, we recommend deploying the new version of Dataverse, then updating the `schema.xml` on the solr side. With 6.2, we recommend to install the base schema first. Without it Dataverse 6.2 is not going to be able to show any results after the initial deployment. If your instance is using any custom metadata blocks, you will need to further modify the schema, see the last step of this instruction (step 8). - Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.2/installation/prerequisites.html#solr-init-script)) - Replace schema.xml - - `cd /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` - `wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/schema.xml` + - `cp schema.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` - Start Solr instance (usually `service solr start`, depending on Solr/OS) From 642b65905b3bb2152a9326bc4c6ceb8f206b9e9c Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 29 Mar 2024 13:34:21 -0400 Subject: [PATCH 1064/1112] fix test --- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 3d630adbf41..3582301ff54 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -181,7 +181,6 @@ public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws In } private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { - int expectedNumberOfSetsHarvested = allowHarvestingMissingCVV ? DATASETS_IN_CONTROL_SET : DATASETS_IN_CONTROL_SET - 1; // This test will create a client and attempt to perform an actual // harvest and validate the resulting harvested content. 
@@ -266,7 +265,11 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte assertEquals(harvestTimeStamp, responseJsonPath.getString("data.lastNonEmpty")); // d) Confirm that the correct number of datasets have been harvested: - assertEquals(expectedNumberOfSetsHarvested, responseJsonPath.getInt("data.lastDatasetsHarvested")); + if (allowHarvestingMissingCVV) { + assertEquals(DATASETS_IN_CONTROL_SET, responseJsonPath.getInt("data.lastDatasetsHarvested")); + } else { + assertTrue(responseJsonPath.getInt("data.lastDatasetsHarvested") < DATASETS_IN_CONTROL_SET); + } // ok, it looks like the harvest has completed successfully. break; From 91a9ae95400ae02556ad7dbcef6335b9c2d12e85 Mon Sep 17 00:00:00 2001 From: landreev Date: Fri, 29 Mar 2024 13:44:06 -0400 Subject: [PATCH 1065/1112] Update 6.2-release-notes.md --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index 2b95568105a..a80ee8d79e9 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -422,7 +422,7 @@ As noted above, deployment of the war file might take several minutes due a data - `service payara stop` - `service payara start` -7\. Update the following Metadata Blocks: +7\. Update the following Metadata Blocks to reflect the incremental improvements made to the handling of core metadata fields: ``` wget https://github.com/IQSS/dataverse/releases/download/v6.2/geospatial.tsv From a23934f90e636fb7d9fc7030126cb93a8102dcc0 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 19:23:46 +0000 Subject: [PATCH 1066/1112] Added: release notes for #10389 --- doc/release-notes/10389-metadatablocks-api-extension.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 doc/release-notes/10389-metadatablocks-api-extension.md diff --git a/doc/release-notes/10389-metadatablocks-api-extension.md b/doc/release-notes/10389-metadatablocks-api-extension.md new file mode 100644 index 00000000000..f34b60418cd --- /dev/null +++ b/doc/release-notes/10389-metadatablocks-api-extension.md @@ -0,0 +1,4 @@ +New optional query parameters added to ``api/metadatablocks`` and ``api/dataverses/{id}/metadatablocks`` endpoints: + +- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. +- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. From 58d81648304949fce1f936e1a631c1147f93a3c0 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 29 Mar 2024 15:27:17 -0400 Subject: [PATCH 1067/1112] #10422 wording change --- doc/release-notes/6.2-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.2-release-notes.md b/doc/release-notes/6.2-release-notes.md index a80ee8d79e9..f694703f0a6 100644 --- a/doc/release-notes/6.2-release-notes.md +++ b/doc/release-notes/6.2-release-notes.md @@ -277,7 +277,7 @@ For more information visit the full native API guide on [this link](https://guid ### Endpoint Fixed: Datasets Metadata -The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset that the user has access. 
+The API endpoint `api/datasets/{id}/metadata` has been changed to default to the latest version of the dataset to which the user has access. ### Experimental Make Data Count processingState API From 64ce1b2230cd4a63ea66535e935b4d675d9484a6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 29 Mar 2024 19:31:55 +0000 Subject: [PATCH 1068/1112] Added: #10389 release note tweak --- doc/release-notes/10389-metadatablocks-api-extension.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/10389-metadatablocks-api-extension.md b/doc/release-notes/10389-metadatablocks-api-extension.md index f34b60418cd..9b14100d33c 100644 --- a/doc/release-notes/10389-metadatablocks-api-extension.md +++ b/doc/release-notes/10389-metadatablocks-api-extension.md @@ -2,3 +2,5 @@ New optional query parameters added to ``api/metadatablocks`` and ``api/datavers - ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. - ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. + +Added new ``displayOnCreate`` field to the MetadataBlock and DatasetFieldType payloads. From 2ed51c290ffe978fcb3dca36e0b5b046cfded2c4 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 29 Mar 2024 17:47:50 -0400 Subject: [PATCH 1069/1112] fix test --- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 3582301ff54..6f10de10997 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -6,7 +6,9 @@ import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; -import org.junit.jupiter.api.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; @@ -25,7 +27,6 @@ * /api/harvest/clients/ api to run an actual harvest of a control set and * then validate the resulting harvested content. */ -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) public class HarvestingClientsIT { private static final Logger logger = Logger.getLogger(HarvestingClientsIT.class.getCanonicalName()); @@ -87,7 +88,6 @@ private static void setupCollection() { } @Test - @Order(1) public void testCreateEditDeleteClient() throws InterruptedException { // This method focuses on testing the native Dataverse harvesting client // API. 
@@ -170,12 +170,10 @@ public void testCreateEditDeleteClient() throws InterruptedException { } @Test - @Order(2) public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { harvestingClientRun(true); } @Test - @Order(3) public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { harvestingClientRun(false); } From 38d565fc5016739f3602dd9f0cc2720e7db3e2e1 Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Fri, 29 Mar 2024 17:51:14 -0400 Subject: [PATCH 1070/1112] fix test --- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 6f10de10997..1744cdbe009 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -169,14 +169,14 @@ public void testCreateEditDeleteClient() throws InterruptedException { assertEquals(OK.getStatusCode(), rDelete.getStatusCode()); } - @Test - public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { - harvestingClientRun(true); - } @Test public void testHarvestingClientRun_AllowHarvestingMissingCVV_False() throws InterruptedException { harvestingClientRun(false); } + @Test + public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws InterruptedException { + harvestingClientRun(true); + } private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { From e54a164da8a1d4426eeff0099cba3b98062c4443 Mon Sep 17 00:00:00 2001 From: Julian Gautier Date: Mon, 1 Apr 2024 16:24:53 -0400 Subject: [PATCH 1071/1112] #5819 Clarifies dataset versioning rules Clarifies Dataverse's rules for when depositors are forced to publish a draft version as a major version and when they can choose to publish the version as a major or minor version. See #5819 --- doc/sphinx-guides/source/user/dataset-management.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index d3faf479deb..9538be4a1ec 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -742,9 +742,9 @@ Versioning is important for long-term research data management where metadata an |image3| -Once you edit your published dataset a new draft version of this dataset will be created. To publish this new version of your dataset, select the "Publish Dataset" button on the top right side of the page. If you were at version 1 of your dataset, depending on the types of changes you had made, you would be asked to publish your draft as either version 1.1 or version 2.0. +Once you edit your published dataset, a draft version will be created. To publish this draft version, use the “Publish Dataset” button at the top right side of the page. -**Important Note:** If you add a file, your dataset will automatically be bumped up to a major version (e.g., if you were at 1.0 you will go to 2.0). +If files were added or removed, or if your dataset's previous version was deaccessioned, you must agree to publish the draft as a major version, such as version 2.0. Otherwise, you can choose to publish the draft as a major version or as a minor version, such as version 1.1. 
On the Versions tab of a dataset page, there is a versions table that displays the version history of the dataset. You can use the version number links in this table to navigate between the different versions of the dataset, including the unpublished draft version, if you have permission to access it. From b08c3e71be9cfa4f1091b99b372f509399a757c6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 1 Apr 2024 21:35:33 -0400 Subject: [PATCH 1072/1112] add jQuery to get search working #10455 --- doc/sphinx-guides/requirements.txt | 3 +++ doc/sphinx-guides/source/conf.py | 1 + 2 files changed, 4 insertions(+) diff --git a/doc/sphinx-guides/requirements.txt b/doc/sphinx-guides/requirements.txt index 20daa189037..8eadb843cff 100755 --- a/doc/sphinx-guides/requirements.txt +++ b/doc/sphinx-guides/requirements.txt @@ -8,3 +8,6 @@ myst-parser==2.0.0 # tabs sphinx-tabs==3.4.5 + +# jQuery +sphinxcontrib-jquery diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 5a4b124cf2e..6478f15655e 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -42,6 +42,7 @@ 'sphinx.ext.viewcode', 'sphinx.ext.graphviz', 'sphinxcontrib.icon', + 'sphinxcontrib.jquery', 'myst_parser', 'sphinx_tabs.tabs', ] From cba6131328c139baff8d26d3d8dd2e749edb05e9 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 2 Apr 2024 16:29:58 -0400 Subject: [PATCH 1073/1112] #10242 featured collection via api --- .../harvard/iq/dataverse/api/Dataverses.java | 82 +++++++++++++++++++ src/main/java/propertyFiles/Bundle.properties | 1 + .../iq/dataverse/api/DataversesIT.java | 25 ++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 8 ++ 4 files changed, 116 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a1dbc3a1de6..6bbace2dffd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -7,6 +7,8 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseContact; +import edu.harvard.iq.dataverse.DataverseFeaturedDataverse; +import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; import edu.harvard.iq.dataverse.DataverseMetadataBlockFacet; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; @@ -14,6 +16,7 @@ import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.FeaturedDataverseServiceBean; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.GuestbookServiceBean; @@ -136,6 +139,7 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.StreamingOutput; +import java.util.ArrayList; import javax.xml.stream.XMLStreamException; /** @@ -167,6 +171,12 @@ public class Dataverses extends AbstractApiBean { @EJB DataverseServiceBean dataverseService; + + @EJB + DataverseLinkingServiceBean linkingService; + + @EJB + FeaturedDataverseServiceBean featuredDataverseService; @EJB SwordServiceBean swordService; @@ -820,6 +830,78 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide return e.getResponse(); } } + + @POST + @AuthRequired + 
@Path("{identifier}/featured") + /** + * Allows user to set featured dataverses - must have edit dataverse permission + * + */ + public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { + List dvsFromInput = new LinkedList<>(); + for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { + + Dataverse dvToBeFeatured = dataverseService.findByAlias(dvAlias.getString()); + if (dvToBeFeatured == null) { + return error(Response.Status.BAD_REQUEST, "Can't find dataverse collection with alias '" + dvAlias + "'"); + } + dvsFromInput.add(dvToBeFeatured); + } + + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List featuredSource = new ArrayList<>(); + List featuredTarget = new ArrayList<>(); + featuredSource.addAll(dataverseService.findAllPublishedByOwnerId(dataverse.getId())); + featuredSource.addAll(linkingService.findLinkedDataverses(dataverse.getId())); + List featuredList = featuredDataverseService.findByDataverseId(dataverse.getId()); + + if(featuredSource.isEmpty()){ + return error(Response.Status.BAD_REQUEST, "There are no collections avaialble to be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + } + + for (DataverseFeaturedDataverse dfd : featuredList) { + Dataverse fd = dfd.getFeaturedDataverse(); + featuredTarget.add(fd); + featuredSource.remove(fd); + } + + for (Dataverse test : dvsFromInput) { + if (featuredTarget.contains(test)) { + return error(Response.Status.BAD_REQUEST, "Dataverse collection '" + test.getDisplayName() + "' is already featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + } + + if (featuredSource.contains(test)) { + featuredTarget.add(test); + } else { + return error(Response.Status.BAD_REQUEST, "Dataverse collection '" + test.getDisplayName() + "' may not be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + } + + } + // by passing null for Facets and DataverseFieldTypeInputLevel, those are not changed + execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); + return ok("Featured Dataverses of dataverse " + dvIdtf + " updated."); + + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + + } + + @DELETE + @AuthRequired + @Path("{identifier}/featured") + public Response deleteFeaturedCollections(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) throws WrappedResponse { + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List featuredTarget = new ArrayList<>(); + execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); + return ok(BundleUtil.getStringFromBundle("dataverses.api.delete.featured.collections.successful")); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } @POST @AuthRequired diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 692ab9e0686..3887b7e2b67 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2698,6 +2698,7 @@ dataverses.api.move.dataverse.failure.not.published=Published dataverse may not dataverses.api.move.dataverse.error.guestbook=Dataset guestbook is not in target dataverse. dataverses.api.move.dataverse.error.template=Dataverse template is not in target dataverse. 
dataverses.api.move.dataverse.error.featured=Dataverse is featured in current dataverse. +dataverses.api.delete.featured.collections.successful=Featured dataverses have been removed dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse. dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents. dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3330d11435a..4119635f06b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -692,4 +692,29 @@ public void testAttributesApi() throws Exception { assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); } + @Test + public void testLinkDataverse() throws Exception { + + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createSubDVToBeFeatured = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-feature", null, apiToken, dataverseAlias); + String subDataverseAlias = UtilIT.getAliasFromResponse(createSubDVToBeFeatured); + + Response featureSubDVResponse = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); + + assertEquals(OK.getStatusCode(), featureSubDVResponse.getStatusCode()); + + Response deleteSubCollectionResponse = UtilIT.deleteDataverse(subDataverseAlias, apiToken); + deleteSubCollectionResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), deleteSubCollectionResponse.getStatusCode()); + + Response deleteCollectionResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + deleteCollectionResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ba36911ffae..73c9ed2e3a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3263,6 +3263,14 @@ static Response addDataverseRole(String pathToJsonFile, String dvAlias, String a .post("/api/roles?dvo="+dvAlias); return addBannerMessageResponse; } + + static Response addFeaturedDataverse (String dvAlias, String featuredDvAlias, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .post("/api/dataverses/"+dvAlias+"/featured/"+featuredDvAlias); + return addBannerMessageResponse; + } static Response deleteDataverseRole( String roleAlias, String apiToken) { From 59ae262239dca3df4b896bc3e593190d39d01ce3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 10:13:23 -0400 Subject: [PATCH 1074/1112] #10242 add/update/fix tests --- .../harvard/iq/dataverse/api/Dataverses.java | 1 + .../iq/dataverse/api/DataversesIT.java | 33 +++++++++++++++++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 5 ++- 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 6bbace2dffd..222dba3a6e0 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -840,6 +840,7 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide */ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { List dvsFromInput = new LinkedList<>(); + for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { Dataverse dvToBeFeatured = dataverseService.findByAlias(dvAlias.getString()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 4119635f06b..1aab1509cc2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -693,7 +693,7 @@ public void testAttributesApi() throws Exception { } @Test - public void testLinkDataverse() throws Exception { + public void testFeatureDataverse() throws Exception { Response createUser = UtilIT.createRandomUser(); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -701,17 +701,46 @@ public void testLinkDataverse() throws Exception { Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + Response createSubDVToBeFeatured = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-feature", null, apiToken, dataverseAlias); String subDataverseAlias = UtilIT.getAliasFromResponse(createSubDVToBeFeatured); - Response featureSubDVResponse = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); + //publish a sub dataverse so that the owner will have something to feature + Response createSubDVToBePublished = UtilIT.createSubDataverse(UtilIT.getRandomDvAlias() + "-pub", null, apiToken, dataverseAlias); + assertEquals(201, createSubDVToBePublished.getStatusCode()); + String subDataverseAliasPub = UtilIT.getAliasFromResponse(createSubDVToBePublished); + publishDataverse = UtilIT.publishDataverseViaNativeApi(subDataverseAliasPub, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + //can't feature a dataverse that is unpublished + Response featureSubDVResponseUnpublished = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); + featureSubDVResponseUnpublished.prettyPrint(); + assertEquals(400, featureSubDVResponseUnpublished.getStatusCode()); + //can't feature a dataverse you don't own + Response featureSubDVResponseNotOwned = UtilIT.addFeaturedDataverse(dataverseAlias, "root", apiToken); + featureSubDVResponseNotOwned.prettyPrint(); + assertEquals(400, featureSubDVResponseNotOwned.getStatusCode()); + + publishDataverse = UtilIT.publishDataverseViaNativeApi(subDataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + + Response featureSubDVResponse = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); + featureSubDVResponse.prettyPrint(); assertEquals(OK.getStatusCode(), featureSubDVResponse.getStatusCode()); Response deleteSubCollectionResponse = UtilIT.deleteDataverse(subDataverseAlias, apiToken); deleteSubCollectionResponse.prettyPrint(); assertEquals(OK.getStatusCode(), deleteSubCollectionResponse.getStatusCode()); + Response 
deleteSubCollectionPubResponse = UtilIT.deleteDataverse(subDataverseAliasPub, apiToken); + deleteSubCollectionResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), deleteSubCollectionPubResponse.getStatusCode()); + Response deleteCollectionResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); deleteCollectionResponse.prettyPrint(); assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 73c9ed2e3a9..3734424c02d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3265,10 +3265,13 @@ static Response addDataverseRole(String pathToJsonFile, String dvAlias, String a } static Response addFeaturedDataverse (String dvAlias, String featuredDvAlias, String apiToken) { + + String jsonString = "[\"" + featuredDvAlias + "\"]"; Response addBannerMessageResponse = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .post("/api/dataverses/"+dvAlias+"/featured/"+featuredDvAlias); + .body(jsonString) + .post("/api/dataverses/"+dvAlias+"/featured/"); return addBannerMessageResponse; } From dd5e42e0cad6dbf03a5ec40664ac761d185549bd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 10:27:25 -0400 Subject: [PATCH 1075/1112] #10242 add errormessage checks --- .../harvard/iq/dataverse/api/DataversesIT.java | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 1aab1509cc2..677d4a84673 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -35,7 +35,9 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.file.Files; import io.restassured.path.json.JsonPath; +import static jakarta.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; +import static org.hamcrest.CoreMatchers.containsString; import org.hamcrest.Matchers; public class DataversesIT { @@ -719,16 +721,27 @@ public void testFeatureDataverse() throws Exception { Response featureSubDVResponseUnpublished = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); featureSubDVResponseUnpublished.prettyPrint(); assertEquals(400, featureSubDVResponseUnpublished.getStatusCode()); + featureSubDVResponseUnpublished.then().assertThat() + .body(containsString("may not be featured")); //can't feature a dataverse you don't own Response featureSubDVResponseNotOwned = UtilIT.addFeaturedDataverse(dataverseAlias, "root", apiToken); featureSubDVResponseNotOwned.prettyPrint(); assertEquals(400, featureSubDVResponseNotOwned.getStatusCode()); + featureSubDVResponseNotOwned.then().assertThat() + .body(containsString("may not be featured")); + + //can't feature a dataverse that doesn't exist + Response featureSubDVResponseNotExist = UtilIT.addFeaturedDataverse(dataverseAlias, "dummy-alias-sek-foobar-333", apiToken); + featureSubDVResponseNotExist.prettyPrint(); + assertEquals(400, featureSubDVResponseNotExist.getStatusCode()); + featureSubDVResponseNotExist.then().assertThat() + .body(containsString("Can't find dataverse collection")); publishDataverse = UtilIT.publishDataverseViaNativeApi(subDataverseAlias, apiToken); assertEquals(200, publishDataverse.getStatusCode()); - + //once published it should work Response 
featureSubDVResponse = UtilIT.addFeaturedDataverse(dataverseAlias, subDataverseAlias, apiToken); featureSubDVResponse.prettyPrint(); assertEquals(OK.getStatusCode(), featureSubDVResponse.getStatusCode()); From 0e6de09890175366cd2d5fc313c1118c2c7a4f79 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 10:41:37 -0400 Subject: [PATCH 1076/1112] #10242 add release note --- doc/release-notes/10242-add-feature-dv-api | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10242-add-feature-dv-api diff --git a/doc/release-notes/10242-add-feature-dv-api b/doc/release-notes/10242-add-feature-dv-api new file mode 100644 index 00000000000..5c786554ff9 --- /dev/null +++ b/doc/release-notes/10242-add-feature-dv-api @@ -0,0 +1 @@ +New api endpoints have been added to allow you to add or remove featured collections from a dataverse collection. From 4157c103178cdaf406ca95ea48e2bc41f75471e2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 11:46:46 -0400 Subject: [PATCH 1077/1112] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 44 +++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c34543d6aa..2ffac501097 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -564,6 +564,50 @@ The fully expanded example above (without environment variables) looks like this Note: you must have "Add Dataset" permission in the given collection to invoke this endpoint. + + +Set Featured Collections for a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Assign featured collections for a given Dataverse collection identified by ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/featured" --upload-file collection-alias.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/featured" --upload-file collection-alias.json + +Where collection-alias.json contains a JSON encoded list of collections aliases to be featured (e.g. ``["collection1-alias","collection2-alias"]``). + +Note: You may only feature collections that are published and owned by or linked to the featuring collection. + +Remove Featured Collections from a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Remove featured collections from a given Dataverse collection identified by ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID/featured" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/featured" + .. 
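The curl examples above show the new featured-collections endpoint from the shell. As a rough, non-normative sketch, the same POST can be issued from Java with the JDK's built-in HTTP client; the endpoint path, the X-Dataverse-key header, and the JSON array of aliases follow the documentation and the UtilIT.addFeaturedDataverse helper in this patch series, while the server URL, API token, and alias values below are placeholder assumptions.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class FeatureCollectionsSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder values; assumptions for this sketch, not taken from the patches.
            String serverUrl = "https://demo.dataverse.org";
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";
            String owningCollection = "root";

            // JSON array of collection aliases to feature, the same body shape that
            // UtilIT.addFeaturedDataverse builds and that collection-alias.json holds.
            String aliasesJson = "[\"collection1-alias\",\"collection2-alias\"]";

            HttpRequest request = HttpRequest.newBuilder(
                    URI.create(serverUrl + "/api/dataverses/" + owningCollection + "/featured"))
                    .header("X-Dataverse-key", apiToken)
                    .POST(HttpRequest.BodyPublishers.ofString(aliasesJson))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());

            // 200 on success; 400 if a listed collection is unpublished, not owned/linked, or missing.
            System.out.println(response.statusCode() + " " + response.body());
        }
    }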
_create-dataset-command: Create a Dataset in a Dataverse Collection From c44a04a0a761b92b79ebdd9023f0f317165728a1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 13:00:05 -0400 Subject: [PATCH 1078/1112] #10242 update doc wording --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2ffac501097..ac2c3cd7e76 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -569,7 +569,7 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t Set Featured Collections for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Assign featured collections for a given Dataverse collection identified by ``id``: +Add featured collections to a given Dataverse collection identified by ``id``: .. code-block:: bash @@ -587,7 +587,7 @@ The fully expanded example above (without environment variables) looks like this Where collection-alias.json contains a JSON encoded list of collections aliases to be featured (e.g. ``["collection1-alias","collection2-alias"]``). -Note: You may only feature collections that are published and owned by or linked to the featuring collection. +Note: You may only feature collections that are published and owned by or linked to the featuring collection. Also, using this endpoint will only add new featured collections it will not remove collections that have already been featured. Remove Featured Collections from a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From e7134947494724c801719580bae370402191a4c8 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 14:41:57 -0400 Subject: [PATCH 1079/1112] #10242 add get featured collections --- .../harvard/iq/dataverse/api/Dataverses.java | 26 ++++++++++ .../impl/ListFeaturedCollectionsCommand.java | 50 +++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 222dba3a6e0..2ad5ac0e3e1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -59,6 +59,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; +import edu.harvard.iq.dataverse.engine.command.impl.ListFeaturedCollectionsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlockFacetsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlocksCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; @@ -831,6 +832,31 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide } } + + @GET + @AuthRequired + @Path("{identifier}/featured") + /* + Allows user to get the collections that are featured by a given collection + probably more for SPA than end user + */ + public Response getFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { + + try { + User u = getRequestUser(crc); + DataverseRequest r = 
createDataverseRequest(u); + Dataverse dataverse = findDataverseOrDie(dvIdtf); + JsonArrayBuilder fs = Json.createArrayBuilder(); + for (Dataverse f : execCommand(new ListFeaturedCollectionsCommand(r, dataverse))) { + fs.add(f.getAlias()); + } + return ok(fs); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + @POST @AuthRequired @Path("{identifier}/featured") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java new file mode 100644 index 00000000000..4dca522e499 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFeaturedCollectionsCommand.java @@ -0,0 +1,50 @@ + +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseFeaturedDataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * + * @author stephenkraffmiller + */ +public class ListFeaturedCollectionsCommand extends AbstractCommand> { + + private final Dataverse dv; + + public ListFeaturedCollectionsCommand(DataverseRequest aRequest, Dataverse aDataverse) { + super(aRequest, aDataverse); + dv = aDataverse; + } + + @Override + public List execute(CommandContext ctxt) throws CommandException { + List featuredTarget = new ArrayList<>(); + List featuredList = ctxt.featuredDataverses().findByDataverseId(dv.getId()); + for (DataverseFeaturedDataverse dfd : featuredList) { + Dataverse fd = dfd.getFeaturedDataverse(); + featuredTarget.add(fd); + } + return featuredTarget; + + } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + dv.isReleased() ? 
Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } + +} From c84de86843e8dc0c959032ad8eff871b4d1d6a03 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 15:52:51 -0400 Subject: [PATCH 1080/1112] #10242 add more tests --- .../iq/dataverse/api/DataversesIT.java | 14 +++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 20 +++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 677d4a84673..083004ff2eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -746,6 +746,20 @@ public void testFeatureDataverse() throws Exception { featureSubDVResponse.prettyPrint(); assertEquals(OK.getStatusCode(), featureSubDVResponse.getStatusCode()); + + Response getFeaturedDataverseResponse = UtilIT.getFeaturedDataverses(dataverseAlias, apiToken); + getFeaturedDataverseResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), getFeaturedDataverseResponse.getStatusCode()); + getFeaturedDataverseResponse.then().assertThat() + .body("data[0]", equalTo(subDataverseAlias)); + + Response deleteFeaturedDataverseResponse = UtilIT.deleteFeaturedDataverses(dataverseAlias, apiToken); + deleteFeaturedDataverseResponse.prettyPrint(); + + assertEquals(OK.getStatusCode(), deleteFeaturedDataverseResponse.getStatusCode()); + deleteFeaturedDataverseResponse.then().assertThat() + .body(containsString("Featured dataverses have been removed")); + Response deleteSubCollectionResponse = UtilIT.deleteDataverse(subDataverseAlias, apiToken); deleteSubCollectionResponse.prettyPrint(); assertEquals(OK.getStatusCode(), deleteSubCollectionResponse.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 3734424c02d..523bc1e689b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3268,11 +3268,27 @@ static Response addFeaturedDataverse (String dvAlias, String featuredDvAlias, St String jsonString = "[\"" + featuredDvAlias + "\"]"; - Response addBannerMessageResponse = given() + Response addFeaturedDataverseResponse = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .body(jsonString) .post("/api/dataverses/"+dvAlias+"/featured/"); - return addBannerMessageResponse; + return addFeaturedDataverseResponse; + } + + static Response deleteFeaturedDataverses (String dvAlias, String apiToken) { + + Response deleteFeaturedDataversesResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/dataverses/"+dvAlias+"/featured/"); + return deleteFeaturedDataversesResponse; + } + + static Response getFeaturedDataverses (String dvAlias, String apiToken) { + + Response deleteFeaturedDataversesResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/dataverses/"+dvAlias+"/featured/"); + return deleteFeaturedDataversesResponse; } static Response deleteDataverseRole( String roleAlias, String apiToken) { From 4c515bbe3b7f7a97ed1bccfaa427916c0dcf7ec5 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 3 Apr 2024 16:14:31 -0400 Subject: [PATCH 1081/1112] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 22 ++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git 
a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index ac2c3cd7e76..ceb311e719b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -564,6 +564,24 @@ The fully expanded example above (without environment variables) looks like this Note: you must have "Add Dataset" permission in the given collection to invoke this endpoint. +List Featured Collections for a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Lists the aliases of the featured collections of a given Dataverse collection identified by ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/featured" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/root/featured" Set Featured Collections for a Dataverse Collection @@ -587,7 +605,7 @@ The fully expanded example above (without environment variables) looks like this Where collection-alias.json contains a JSON encoded list of collections aliases to be featured (e.g. ``["collection1-alias","collection2-alias"]``). -Note: You may only feature collections that are published and owned by or linked to the featuring collection. Also, using this endpoint will only add new featured collections it will not remove collections that have already been featured. +Note: You must have "Edit Dataverse" permission in the given Dataverse to invoke this endpoint. You may only feature collections that are published and owned by or linked to the featuring collection. Also, using this endpoint will only add new featured collections it will not remove collections that have already been featured. Remove Featured Collections from a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -608,6 +626,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/featured" +Note: You must have "Edit Dataverse" permission in the given Dataverse to invoke this endpoint. + .. 
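For the GET and DELETE variants documented above, a similar minimal sketch may help; it again uses the JDK HTTP client, the server URL, token, and alias are placeholders, and the GET response is assumed to carry the featured aliases as a JSON array under data, matching the getFeaturedDataverses endpoint added earlier in this series.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ListAndClearFeaturedSketch {
        public static void main(String[] args) throws Exception {
            String serverUrl = "https://demo.dataverse.org";            // placeholder
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";   // placeholder
            String alias = "root";                                      // placeholder
            HttpClient client = HttpClient.newHttpClient();

            // List the aliases currently featured by the collection.
            HttpRequest list = HttpRequest.newBuilder(
                    URI.create(serverUrl + "/api/dataverses/" + alias + "/featured"))
                    .header("X-Dataverse-key", apiToken)
                    .GET()
                    .build();
            System.out.println(client.send(list, HttpResponse.BodyHandlers.ofString()).body());

            // Remove all featured collections; the success message mirrors the new
            // dataverses.api.delete.featured.collections.successful bundle entry.
            HttpRequest clear = HttpRequest.newBuilder(
                    URI.create(serverUrl + "/api/dataverses/" + alias + "/featured"))
                    .header("X-Dataverse-key", apiToken)
                    .DELETE()
                    .build();
            System.out.println(client.send(clear, HttpResponse.BodyHandlers.ofString()).body());
        }
    }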
_create-dataset-command: Create a Dataset in a Dataverse Collection From 31784a97ade359881040e3d99d923075f865ee70 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 4 Apr 2024 13:20:37 -0400 Subject: [PATCH 1082/1112] Add Harvesting Client name to the Metadata Source facet --- .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index d6b3fd8c339..c68554db180 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -897,7 +897,8 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Thu, 4 Apr 2024 16:59:49 -0400 Subject: [PATCH 1083/1112] Fixed logic with DRAFT and Tests updated --- .../iq/dataverse/api/AbstractApiBean.java | 8 ++- .../harvard/iq/dataverse/api/Datasets.java | 40 +++++++------- ...LatestAccessibleDatasetVersionCommand.java | 15 +++--- .../iq/dataverse/util/json/JsonPrinter.java | 14 ++--- .../harvard/iq/dataverse/api/DatasetsIT.java | 54 +++++++++---------- 5 files changed, 64 insertions(+), 67 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index b7305a24f69..763ae907fde 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -59,6 +59,7 @@ import java.util.logging.Logger; import static org.apache.commons.lang3.StringUtils.isNumeric; +import static org.apache.commons.lang3.StringUtils.reverse; /** * Base class for API beans @@ -398,11 +399,16 @@ protected Dataset findDatasetOrDie(String id) throws WrappedResponse { } protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { + boolean bypassAccessCheck = false; + return findDatasetVersionOrDie(req,versionNumber,ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); + } + + protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned, boolean bypassAccessCheck) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new Datasets.DsVersionHandler>() { @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4204d8fcb45..d90e978073c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -444,27 +444,12 @@ public Response getVersion(@Context ContainerRequestContext crc, if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) { return notFound("Dataset version not found"); } - - DatasetVersion latestDatasetVersion = null; - - //Check perms is false since we are never going to 
retrieve files, we are just getting the status of the version. - //We also want to check always for deaccessioned datasets. - boolean deaccesionedLookup = true; - checkPerms = false; - latestDatasetVersion = getDatasetVersionOrDie(req, - DS_VERSION_LATEST, - dataset, - uriInfo, - headers, - deaccesionedLookup, - checkPerms); if (excludeFiles == null ? true : !excludeFiles) { requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId()); } - JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion, - latestDatasetVersion, + JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion, null, excludeFiles == null ? true : !excludeFiles, returnOwners); @@ -2736,18 +2721,29 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, */ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse { - boolean checkFilePerms = true; - return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms); + boolean checkPermsWhenDeaccessioned = true; + boolean bypassAccessCheck = false; + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); + } + + /* + * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions. + */ + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { + boolean bypassAccessCheck = false; + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); } /* * Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error. 
*/ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, - UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) + UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned, + boolean bypassAccessCheck) throws WrappedResponse { - DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); if (dsv == null || dsv.getId() == null) { throw new WrappedResponse( @@ -4435,9 +4431,9 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String JsonObjectBuilder responseJson; if (isAnonymizedAccess) { List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); - responseJson = json(dsv, null, anonymizedFieldTypeNamesList, true, returnOwners); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners); } else { - responseJson = json(dsv, null, null, true, returnOwners); + responseJson = json(dsv, null, true, returnOwners); } return ok(responseJson); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 7bcc851bde2..a660b8c9f3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -25,24 +25,27 @@ public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final boolean includeDeaccessioned; - private boolean checkPerms; + private boolean checkPermsWhenDeaccessioned; + private boolean bypassAccessCheck; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { - this(aRequest, anAffectedDataset, false, false); + this(aRequest, anAffectedDataset,false, false, false); } - public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPerms) { + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned, boolean bypassAccessCheck) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; this.includeDeaccessioned = includeDeaccessioned; - this.checkPerms = checkPerms; + this.checkPermsWhenDeaccessioned = checkPermsWhenDeaccessioned; + this.bypassAccessCheck = bypassAccessCheck; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { + if (ds.getLatestVersion().isDraft() && + (ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset) || bypassAccessCheck)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPerms)); + return ctxt.engine().submit(new 
GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned, checkPermsWhenDeaccessioned)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 46bbe369c39..99c7a265119 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -412,15 +412,10 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { } public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { - return json(dsv, null, null, includeFiles, false); + return json(dsv, null, includeFiles, false); } - public static JsonObjectBuilder json(DatasetVersion dsv, DatasetVersion latestDsv, boolean includeFiles) { - return json(dsv, latestDsv, null, includeFiles, false); - } - - public static JsonObjectBuilder json(DatasetVersion dsv, - DatasetVersion latestDsv, List anonymizedFieldTypeNamesList, + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, boolean returnOwners) { Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() @@ -430,6 +425,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, .add("versionNumber", dsv.getVersionNumber()) .add("versionMinorNumber", dsv.getMinorVersionNumber()) .add("versionState", dsv.getVersionState().name()) + .add("latestVersionPublishingState", dataset.getLatestVersion().getVersionState().name()) .add("versionNote", dsv.getVersionNote()) .add("archiveNote", dsv.getArchiveNote()) .add("deaccessionLink", dsv.getDeaccessionLink()) @@ -443,10 +439,6 @@ public static JsonObjectBuilder json(DatasetVersion dsv, .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD()) .add("citationDate", dataset.getCitationDateFormattedYYYYMMDD()); - if(latestDsv != null) { - bld.add("latestVersionPublishingState", latestDsv.getVersionState().name()); - } - License license = DatasetUtil.getLicense(dsv); if (license != null) { bld.add("license", jsonLicense(dsv)); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1c256029373..734d48fd89f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -662,10 +662,35 @@ public void testDatasetVersionsAPI() { UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + //Set of tests on non-deaccesioned dataset + String specificVersion = "1.0"; + boolean includeDeaccessioned = false; + Response datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.latestVersionPublishingState", equalTo("RELEASED")); + // Upload another file: String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiToken); + uploadResponse2.prettyPrint(); uploadResponse2.then().assertThat().statusCode(OK.getStatusCode()); + + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, 
apiToken, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("DRAFT")) + .body("data.latestVersionPublishingState", equalTo("DRAFT")); + + datasetVersion = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_LATEST, apiTokenNoPerms, excludeFiles, includeDeaccessioned); + datasetVersion.prettyPrint(); + datasetVersion.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.versionState", equalTo("RELEASED")) + .body("data.latestVersionPublishingState", equalTo("DRAFT")); // We should now have a published version, and a draft. @@ -712,10 +737,8 @@ public void testDatasetVersionsAPI() { - //Set of tests on non-deaccesioned dataset - String specificVersion = "1.0"; - boolean includeDeaccessioned = false; - Response datasetVersion = null; + + excludeFiles = true; //Latest published authorized token @@ -724,7 +747,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Latest published unauthorized token @@ -733,7 +755,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("RELEASED")) .body("data.files", equalTo(null)); //Latest authorized token @@ -742,7 +763,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Latest unauthorized token @@ -751,7 +771,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("RELEASED")) .body("data.files", equalTo(null)); //Specific version authorized token @@ -760,7 +779,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Specific version unauthorized token @@ -769,7 +787,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("RELEASED")) .body("data.files", equalTo(null)); excludeFiles = false; @@ -780,7 +797,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(1)); //Latest published unauthorized token @@ -789,7 +805,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", 
equalTo("RELEASED")) .body("data.files.size()", equalTo(1)); //Latest authorized token, user is authenticated should get the Draft version @@ -798,7 +813,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(2)); //Latest unauthorized token, user has no permissions should get the latest Published version @@ -807,7 +821,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("RELEASED")) .body("data.files.size()", equalTo(1)); //Specific version authorized token @@ -816,7 +829,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(1)); //Specific version unauthorized token @@ -825,7 +837,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("RELEASED")) - .body("data.latestVersionPublishingStatus", equalTo("RELEASED")) .body("data.files.size()", equalTo(1)); //We deaccession the dataset @@ -842,7 +853,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(1)); //Latest published requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets @@ -855,7 +865,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(2)); //Latest unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets @@ -868,7 +877,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(1)); //Specific version unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets. 
@@ -884,7 +892,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Latest published exclude files, should get the DEACCESSIONED version @@ -893,7 +900,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DEACCESSIONED")) .body("data.files", equalTo(null)); //Latest authorized token should get the DRAFT version with no files @@ -902,7 +908,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Latest unauthorized token excluding files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets @@ -911,7 +916,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DEACCESSIONED")) .body("data.files", equalTo(null)); //Specific version authorized token @@ -920,7 +924,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Specific version unauthorized token requesting files, one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets. 
@@ -929,7 +932,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DEACCESSIONED")) - .body("data.latestVersionPublishingStatus", equalTo("DEACCESSIONED")) .body("data.files", equalTo(null)); //Set of test when we have a deaccessioned dataset but we don't include deaccessioned @@ -952,7 +954,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files.size()", equalTo(2)); //Latest unauthorized token one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets @@ -987,7 +988,6 @@ public void testDatasetVersionsAPI() { datasetVersion.prettyPrint(); datasetVersion.then().assertThat().statusCode(OK.getStatusCode()) .body("data.versionState", equalTo("DRAFT")) - .body("data.latestVersionPublishingStatus", equalTo("DRAFT")) .body("data.files", equalTo(null)); //Latest unauthorized token one version is DEACCESSIONED the second is DRAFT so shouldn't get any datasets From 0bf1e7c8c28b8769918316e38dd9f22ce2773993 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 5 Apr 2024 10:04:29 +0100 Subject: [PATCH 1084/1112] Changed: using CommandContext for injecting MetadataBlockServiceBean in ListMetadataBlocksCommand --- .../harvard/iq/dataverse/EjbDataverseEngine.java | 8 ++++++++ .../edu/harvard/iq/dataverse/api/Dataverses.java | 3 +-- .../iq/dataverse/engine/command/CommandContext.java | 5 ++++- .../command/impl/ListMetadataBlocksCommand.java | 13 +++++-------- .../iq/dataverse/engine/TestCommandContext.java | 5 +++++ 5 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index bb3fa475847..b3b69e25bf3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -124,6 +124,9 @@ public class EjbDataverseEngine { @EJB GuestbookResponseServiceBean responses; + + @EJB + MetadataBlockServiceBean metadataBlockService; @EJB DataverseLinkingServiceBean dvLinking; @@ -587,6 +590,11 @@ public ActionLogServiceBean actionLog() { return logSvc; } + @Override + public MetadataBlockServiceBean metadataBlocks() { + return metadataBlockService; + } + @Override public void beginCommandSequence() { this.commandsCalled = new Stack(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 6c105b67d77..250257fc33b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -719,8 +719,7 @@ public Response listMetadataBlocks(@Context ContainerRequestContext crc, new ListMetadataBlocksCommand( createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf), - onlyDisplayedOnCreate, - metadataBlockSvc + onlyDisplayedOnCreate ) ); return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate)); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java index 6c4d63e3e35..48e8cd952b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.FileDownloadServiceBean; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.GuestbookServiceBean; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; @@ -133,7 +134,9 @@ public interface CommandContext { public ConfirmEmailServiceBean confirmEmail(); public ActionLogServiceBean actionLog(); - + + public MetadataBlockServiceBean metadataBlocks(); + public void beginCommandSequence(); public boolean completeCommandSequence(Command command); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java index f4689596160..8275533ced2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -24,28 +23,26 @@ public class ListMetadataBlocksCommand extends AbstractCommand execute(CommandContext ctxt) throws CommandException { if (onlyDisplayedOnCreate) { - return listMetadataBlocksDisplayedOnCreate(dataverse); + return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse); } return dataverse.getMetadataBlocks(); } - private List listMetadataBlocksDisplayedOnCreate(Dataverse dataverse) { + private List listMetadataBlocksDisplayedOnCreate(CommandContext ctxt, Dataverse dataverse) { if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) { - return metadataBlockService.listMetadataBlocksDisplayedOnCreate(dataverse); + return ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse); } - return listMetadataBlocksDisplayedOnCreate(dataverse.getOwner()); + return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse.getOwner()); } @Override diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java index 255125189ae..fa89bb756f5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java @@ -224,6 +224,11 @@ public ConfirmEmailServiceBean confirmEmail() { public ActionLogServiceBean actionLog() { return null; } + + @Override + public MetadataBlockServiceBean metadataBlocks() { + return null; + } @Override public StorageUseServiceBean storageUse() { From 977bf90b9d0fd8522c7d570c65f6a65382a74f7b Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 5 Apr 2024 11:03:17 +0100 Subject: [PATCH 1085/1112] Changed: variable renamed in JsonPrinter class to improve readability --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 
c7fb78e01e6..8b7f94d3dd1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -580,10 +580,10 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie return blockBld; } - public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean onlyDisplayedOnCreate) { + public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) { JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for (MetadataBlock metadataBlock : metadataBlocks) { - arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, onlyDisplayedOnCreate) : brief.json(metadataBlock)); + arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes) : brief.json(metadataBlock)); } return arrayBuilder; } @@ -614,7 +614,7 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock) { return json(metadataBlock, false); } - public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean onlyDisplayedOnCreate) { + public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) { JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder(); jsonObjectBuilder.add("id", metadataBlock.getId()); jsonObjectBuilder.add("name", metadataBlock.getName()); @@ -623,7 +623,7 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean onlyDi JsonObjectBuilder fieldsBuilder = jsonObjectBuilder(); for (DatasetFieldType datasetFieldType : new TreeSet<>(metadataBlock.getDatasetFieldTypes())) { - if (!onlyDisplayedOnCreate || datasetFieldType.isDisplayOnCreate()) { + if (!printOnlyDisplayedOnCreateDatasetFieldTypes || datasetFieldType.isDisplayOnCreate()) { fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType)); } } From 59d55786072e742e472d28f783a038a559acadae Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 5 Apr 2024 11:16:53 -0400 Subject: [PATCH 1086/1112] Add release note --- doc/release-notes/10464-add-name-harvesting-client-facet.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10464-add-name-harvesting-client-facet.md diff --git a/doc/release-notes/10464-add-name-harvesting-client-facet.md b/doc/release-notes/10464-add-name-harvesting-client-facet.md new file mode 100644 index 00000000000..f56d9f164af --- /dev/null +++ b/doc/release-notes/10464-add-name-harvesting-client-facet.md @@ -0,0 +1,3 @@ +The Metadata Source facet has been updated to show the name of the import client rather than grouping all under 'harvested' + +TODO: Please add notes to re-index http://localhost:8080/api/admin/index guides at: https://guides.dataverse.org/en/latest/admin/solr-search-index.html \ No newline at end of file From 510b7932e5141f142a116a76a5073accab1dc500 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 5 Apr 2024 14:37:39 -0400 Subject: [PATCH 1087/1112] Doc update and remove previous changes --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- .../edu/harvard/iq/dataverse/api/AbstractApiBean.java | 7 +------ .../impl/GetLatestAccessibleDatasetVersionCommand.java | 8 +++----- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c34543d6aa..0eb4e1515a7 100644 --- 
a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -995,7 +995,7 @@ It returns a list of versions with their metadata, and file list: ] } -The optional ``includeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. @@ -1011,15 +1011,15 @@ Get Version of a Dataset export ID=24 export VERSION=1.0 - curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION?includeFiles=false" + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION?excludeFiles=false" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeFiles=false" + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?excludeFiles=false" -The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. 
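A short, illustrative sketch of how a client might combine these query parameters and read the version-state fields that this patch series adds to the JSON output (versionState and latestVersionPublishingState); the JDK HTTP client is used, the server URL, API token, and dataset id are placeholder assumptions, and the response JSON is printed raw rather than parsed with any particular library.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class DatasetVersionStateSketch {
        public static void main(String[] args) throws Exception {
            String serverUrl = "https://demo.dataverse.org";            // placeholder
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";   // placeholder
            String datasetId = "24";                                    // placeholder

            // Ask for the latest accessible version, skip the (potentially large) file list,
            // and allow a deaccessioned version to be returned if that is all that remains.
            String url = serverUrl + "/api/datasets/" + datasetId
                    + "/versions/:latest?excludeFiles=true&includeDeaccessioned=true";

            HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                    .header("X-Dataverse-key", apiToken)
                    .GET()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());

            // The body carries data.versionState (state of the returned version) and
            // data.latestVersionPublishingState (state of the dataset's newest version),
            // the two fields the DatasetsIT assertions in this series check.
            System.out.println(response.statusCode());
            System.out.println(response.body());
        }
    }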
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 763ae907fde..72c214004fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -399,16 +399,11 @@ protected Dataset findDatasetOrDie(String id) throws WrappedResponse { } protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse { - boolean bypassAccessCheck = false; - return findDatasetVersionOrDie(req,versionNumber,ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); - } - - protected DatasetVersion findDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned, boolean bypassAccessCheck) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new Datasets.DsVersionHandler>() { @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index a660b8c9f3f..431b3ff47c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -26,24 +26,22 @@ public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand Date: Fri, 5 Apr 2024 14:43:59 -0400 Subject: [PATCH 1088/1112] Missing changes from last commit --- src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 1 - src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 72c214004fe..b7305a24f69 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -59,7 +59,6 @@ import java.util.logging.Logger; import static org.apache.commons.lang3.StringUtils.isNumeric; -import static org.apache.commons.lang3.StringUtils.reverse; /** * Base class for API beans diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d90e978073c..930776ffb0f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2743,7 +2743,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String boolean bypassAccessCheck) throws WrappedResponse { - DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck); + DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned); if (dsv == null 
|| dsv.getId() == null) { throw new WrappedResponse( From c435bc91de533d8f6bdb54b0d29efc1c0f7a400a Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 5 Apr 2024 18:42:08 -0400 Subject: [PATCH 1089/1112] Integration Test added --- .../edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 4466182b435..1c23af36142 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -15,9 +15,7 @@ import static jakarta.ws.rs.core.Response.Status.ACCEPTED; import static jakarta.ws.rs.core.Response.Status.OK; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; -import static org.junit.jupiter.api.Assertions.assertTrue; /** * This class tests Harvesting Client functionality. @@ -272,6 +270,12 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte } while (i Date: Sat, 6 Apr 2024 16:50:47 +0200 Subject: [PATCH 1090/1112] Explain that 5.6 introduced a breaking change to the API #9549 The breaking change happened in 509746ca and was shipped in 5.6. --- doc/sphinx-guides/source/api/changelog.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 025a2069d6e..f826a2cd603 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -24,4 +24,9 @@ v6.1 v6.0 ---- -- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. See :doc:`dataaccess`. \ No newline at end of file +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. See :doc:`dataaccess`. + +v5.6 +---- + +- **/api/dataverses/$PARENT/datasets**: The "create dataset" API endpoint now requires the header ``Content-type:application/json`` to be passed. The error can be confusing, saying something about validation, such as ``'{"status":"ERROR","message":"Validation Failed: Title is required. (Invalid value:edu.harvard.iq.dataverse.DatasetField[ id=null ])...``. See :ref:`create-dataset-command`. 
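For readers who run into the confusing validation error quoted in this changelog entry, a minimal Java illustration of the documented requirement that the request carry a Content-type: application/json header; the server URL, API token, parent alias, and dataset JSON file name below are placeholder assumptions.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.file.Path;

    public class CreateDatasetHeaderSketch {
        public static void main(String[] args) throws Exception {
            String serverUrl = "https://demo.dataverse.org";            // placeholder
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";   // placeholder
            String parentAlias = "root";                                // placeholder

            HttpRequest request = HttpRequest.newBuilder(
                    URI.create(serverUrl + "/api/dataverses/" + parentAlias + "/datasets"))
                    .header("X-Dataverse-key", apiToken)
                    // Since 5.6 this header is required; without it the API answers with the
                    // misleading "Validation Failed: Title is required..." message quoted above.
                    .header("Content-Type", "application/json")
                    // Placeholder file holding the dataset JSON to be created.
                    .POST(HttpRequest.BodyPublishers.ofFile(Path.of("dataset.json")))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }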
From 43be1f36d3e8c3d81e79fea481d1e3c0e49f0cce Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Sun, 7 Apr 2024 07:50:22 +0200 Subject: [PATCH 1091/1112] remove double space --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index f826a2cd603..e33204e6354 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -29,4 +29,4 @@ v6.0 v5.6 ---- -- **/api/dataverses/$PARENT/datasets**: The "create dataset" API endpoint now requires the header ``Content-type:application/json`` to be passed. The error can be confusing, saying something about validation, such as ``'{"status":"ERROR","message":"Validation Failed: Title is required. (Invalid value:edu.harvard.iq.dataverse.DatasetField[ id=null ])...``. See :ref:`create-dataset-command`. +- **/api/dataverses/$PARENT/datasets**: The "create dataset" API endpoint now requires the header ``Content-type:application/json`` to be passed. The error can be confusing, saying something about validation, such as ``'{"status":"ERROR","message":"Validation Failed: Title is required. (Invalid value:edu.harvard.iq.dataverse.DatasetField[ id=null ])...``. See :ref:`create-dataset-command`. From df21a087463820992e64f60bfbd900fc425270a1 Mon Sep 17 00:00:00 2001 From: Stephan Heunis Date: Mon, 8 Apr 2024 16:51:57 +0200 Subject: [PATCH 1092/1112] Adds DataLad to the list of integrations in the dataverse admin guide #10468 --- .../source/admin/integrations.rst | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index 2b6bdb8eeb5..e48d7a60798 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -132,6 +132,32 @@ Globus transfer uses an efficient transfer mechanism and has additional features Users can transfer files via `Globus `_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) and a community-developed `dataverse-globus `_ app has been properly installed and configured. +DataLad ++++++++ + +`DataLad`_ is a free and open source decentralized data management system that is built on `git`_ +and `git-annex`_ and provides a unified interface for version control, deposition, content retrieval, +provenance tracking, reproducible execution, and further collaborative management of distributed and +arbitrarily large datasets. + +If your dataset is structured as a `DataLad dataset`_ and you have a local DataLad installation, +the `datalad-dataverse`_ extension package provides interoperability with Dataverse for the purpose +of depositing DataLad datasets to and retrieving DataLad datasets from Dataverse instances, together +with full version history. + +For further information, visit the ``datalad-dataverse`` extension's `documentation page`_, see the +`quickstart`_ for installation details, or follow the step-by-step `tutorial`_ to get hands-on +experience. + +.. _DataLad: https://www.datalad.org +.. _git: https://git-scm.com +.. _git-annex: https://git-annex.branchable.com +.. _DataLad dataset: https://handbook.datalad.org/en/latest/basics/basics-datasets.html +.. _datalad-dataverse: https://github.com/datalad/datalad-dataverse +.. 
_documentation page: https://docs.datalad.org/projects/dataverse/en/latest/index.html +.. _quickstart: https://docs.datalad.org/projects/dataverse/en/latest/settingup.html +.. _tutorial: https://docs.datalad.org/projects/dataverse/en/latest/tutorial.html + Embedding Data on Websites -------------------------- From 76c67b8333c29cdca0139b11a730c9e2502e14aa Mon Sep 17 00:00:00 2001 From: okaradeniz Date: Tue, 9 Apr 2024 11:50:43 +0200 Subject: [PATCH 1093/1112] cleanup of unused code --- .../iq/dataverse/FileDownloadServiceBean.java | 56 +++++-------------- src/main/webapp/file.xhtml | 6 +- 2 files changed, 17 insertions(+), 45 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 07a25444f27..c5073693ab2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -382,35 +382,26 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter } } - public void downloadDatasetCitationXML(Dataset dataset) { - downloadCitationXML(null, dataset, false); - } - public void downloadDatasetCitationXML(DatasetVersion version) { // DatasetVersion-level citation: DataCitation citation=null; citation = new DataCitation(version); - String fileNameString; fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".xml"; downloadXML(citation, fileNameString); } public void downloadDatafileCitationXML(FileMetadata fileMetadata) { - downloadCitationXML(fileMetadata, null, false); + downloadCitationXML(fileMetadata, false); } public void downloadDirectDatafileCitationXML(FileMetadata fileMetadata) { - downloadCitationXML(fileMetadata, null, true); + downloadCitationXML(fileMetadata, true); } - public void downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, boolean direct) { + public void downloadCitationXML(FileMetadata fileMetadata, boolean direct) { DataCitation citation=null; - if (dataset != null){ - citation = new DataCitation(dataset.getLatestVersion()); - } else { - citation= new DataCitation(fileMetadata, direct); - } + citation= new DataCitation(fileMetadata, direct); String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -436,12 +427,6 @@ public void downloadXML(DataCitation citation, String fileNameString) { } catch (IOException e) { } } - - public void downloadDatasetCitationRIS(Dataset dataset) { - - downloadCitationRIS(null, dataset, false); - - } public void downloadDatasetCitationRIS(DatasetVersion version) { // DatasetVersion-level citation: @@ -454,21 +439,17 @@ public void downloadDatasetCitationRIS(DatasetVersion version) { } public void downloadDatafileCitationRIS(FileMetadata fileMetadata) { - downloadCitationRIS(fileMetadata, null, false); + downloadCitationRIS(fileMetadata, false); } public void downloadDirectDatafileCitationRIS(FileMetadata fileMetadata) { - downloadCitationRIS(fileMetadata, null, true); + downloadCitationRIS(fileMetadata, true); } - public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, boolean direct) { + public void downloadCitationRIS(FileMetadata fileMetadata, boolean direct) { DataCitation citation=null; - if (dataset != null){ - citation = new DataCitation(dataset.getLatestVersion()); - } else { - citation= new DataCitation(fileMetadata, direct); - } - + citation= new DataCitation(fileMetadata, direct); + 
String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: @@ -501,11 +482,6 @@ private String getFileNameFromPid(GlobalId id) { return id.asString(); } - public void downloadDatasetCitationBibtex(Dataset dataset) { - - downloadCitationBibtex(null, dataset, false); - - } public void downloadDatasetCitationBibtex(DatasetVersion version) { // DatasetVersion-level citation: @@ -518,21 +494,17 @@ public void downloadDatasetCitationBibtex(DatasetVersion version) { } public void downloadDatafileCitationBibtex(FileMetadata fileMetadata) { - downloadCitationBibtex(fileMetadata, null, false); + downloadCitationBibtex(fileMetadata, false); } public void downloadDirectDatafileCitationBibtex(FileMetadata fileMetadata) { - downloadCitationBibtex(fileMetadata, null, true); + downloadCitationBibtex(fileMetadata, true); } - public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, boolean direct) { + public void downloadCitationBibtex(FileMetadata fileMetadata, boolean direct) { DataCitation citation=null; - if (dataset != null){ - citation = new DataCitation(dataset.getLatestVersion()); - } else { - citation= new DataCitation(fileMetadata, direct); - } - + citation= new DataCitation(fileMetadata, direct); + String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index bcd48fd1f32..ea7b51f9640 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -112,19 +112,19 @@

  • From 31812f9b4d9189af64faba216e6ed0addd98d361 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 9 Apr 2024 11:33:48 -0400 Subject: [PATCH 1094/1112] clarify how to run test suite remove reference to docker-aio --- doc/sphinx-guides/source/developers/testing.rst | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 2ea85913d42..3910a40b118 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -142,6 +142,8 @@ Generally speaking, unit tests have been flagged as non-essential because they a You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them. To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential. +.. _integration-tests: + Integration Tests ----------------- @@ -392,10 +394,10 @@ Run this as the "dataverse" user. Note that after deployment the file "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" exists and is empty. -Run API Tests -~~~~~~~~~~~~~ +Run API Tests to Determine Code Coverage +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Note that even though you see "docker-aio" in the command below, we assume you are not necessarily running the test suite within Docker. (Some day we'll probably move this script to another directory.) For this reason, we pass the URL with the normal port (8080) that app servers run on to the ``run-test-suite.sh`` script. +Note that if you are looking for how to run API tests generally, you should refer to :ref:`integration-tests`. Note that "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite. @@ -405,7 +407,8 @@ Note that "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" will /usr/local/payara6/bin/asadmin start-domain git clone https://github.com/IQSS/dataverse.git cd dataverse - conf/docker-aio/run-test-suite.sh http://localhost:8080 + TESTS=$( Date: Tue, 9 Apr 2024 12:30:28 -0400 Subject: [PATCH 1095/1112] fixing test --- .../iq/dataverse/api/HarvestingClientsIT.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 1744cdbe009..ead9d9f5cec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -179,7 +179,7 @@ public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws Int } private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { - + int expectedNumberOfSetsHarvested = allowHarvestingMissingCVV ? DATASETS_IN_CONTROL_SET : DATASETS_IN_CONTROL_SET - 1; // This test will create a client and attempt to perform an actual // harvest and validate the resulting harvested content. @@ -226,11 +226,11 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte int i = 0; int maxWait=20; // a very conservative interval; this harvest has no business taking this long do { - // Give it an initial 1 sec. delay, to make sure the client state + // Give it an initial 2 sec. 
delay, to make sure the client state // has been updated in the database, which can take some appreciable // amount of time on a heavily-loaded server running a full suite of // tests: - Thread.sleep(1000L); + Thread.sleep(2000L); // keep checking the status of the client with the GET api: Response getClientResponse = given() .get(clientApiPath); @@ -263,11 +263,7 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte assertEquals(harvestTimeStamp, responseJsonPath.getString("data.lastNonEmpty")); // d) Confirm that the correct number of datasets have been harvested: - if (allowHarvestingMissingCVV) { - assertEquals(DATASETS_IN_CONTROL_SET, responseJsonPath.getInt("data.lastDatasetsHarvested")); - } else { - assertTrue(responseJsonPath.getInt("data.lastDatasetsHarvested") < DATASETS_IN_CONTROL_SET); - } + assertEquals(expectedNumberOfSetsHarvested, responseJsonPath.getInt("data.lastDatasetsHarvested")); // ok, it looks like the harvest has completed successfully. break; From 2669bccb6d9c3ed9681709faf614cb5343af373b Mon Sep 17 00:00:00 2001 From: Steven Winship Date: Tue, 9 Apr 2024 12:31:37 -0400 Subject: [PATCH 1096/1112] fixing test --- .../java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index ead9d9f5cec..1ad70d2dbb5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -180,6 +180,7 @@ public void testHarvestingClientRun_AllowHarvestingMissingCVV_True() throws Int private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws InterruptedException { int expectedNumberOfSetsHarvested = allowHarvestingMissingCVV ? DATASETS_IN_CONTROL_SET : DATASETS_IN_CONTROL_SET - 1; + // This test will create a client and attempt to perform an actual // harvest and validate the resulting harvested content. From 6ede22582e063c884e81b6cc3176c37f5d557f31 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 9 Apr 2024 14:27:39 -0400 Subject: [PATCH 1097/1112] Add release note #10468 --- doc/release-notes/10468-doc-datalad-integration.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10468-doc-datalad-integration.md diff --git a/doc/release-notes/10468-doc-datalad-integration.md b/doc/release-notes/10468-doc-datalad-integration.md new file mode 100644 index 00000000000..cd4d2d53a5f --- /dev/null +++ b/doc/release-notes/10468-doc-datalad-integration.md @@ -0,0 +1 @@ +DataLad has been integrated with Dataverse. 
For more information, see https://dataverse-guide--10470.org.readthedocs.build/en/10470/admin/integrations.html#datalad From 8b879f831a203eb7d9d8393d5e3359e96aafcfd5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 10 Apr 2024 15:03:34 +0100 Subject: [PATCH 1098/1112] Changed: publicationURL displayOnCreate field set to true --- scripts/api/data/metadatablocks/citation.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index c5af05927dc..10084faedd3 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -33,7 +33,7 @@ publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. 
a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE publication citation https://schema.org/distribution notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation From 8b527418340a926f8e000d38dd8ade170c44e008 Mon Sep 17 00:00:00 2001 From: landreev Date: Wed, 10 Apr 2024 13:49:26 -0400 Subject: [PATCH 1099/1112] Update 10464-add-name-harvesting-client-facet.md --- doc/release-notes/10464-add-name-harvesting-client-facet.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/10464-add-name-harvesting-client-facet.md b/doc/release-notes/10464-add-name-harvesting-client-facet.md index f56d9f164af..1fc0bb47caf 100644 --- a/doc/release-notes/10464-add-name-harvesting-client-facet.md +++ b/doc/release-notes/10464-add-name-harvesting-client-facet.md @@ -1,3 +1,3 @@ -The Metadata Source facet has been updated to show the name of the import client rather than grouping all under 'harvested' +The Metadata Source facet has been updated to show the name of the harvesting client rather than grouping all such datasets under 'harvested' -TODO: Please add notes to re-index http://localhost:8080/api/admin/index guides at: https://guides.dataverse.org/en/latest/admin/solr-search-index.html \ No newline at end of file +TODO: for the v6.13 release note: Please add a full re-index using http://localhost:8080/api/admin/index to the upgrade instructions. From 7137ce93248cbec269ccec469009d5d0250e5374 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 10 Apr 2024 12:07:53 -0600 Subject: [PATCH 1100/1112] Fix when MDC is displayed (#10463) --- src/main/webapp/dataset.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 7b5db98b9dd..3947c4415f9 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -592,7 +592,7 @@
    -
    +
    From 98cd85cbb6fbf03f3c7217a755b8a27dbb6328b2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 10 Apr 2024 16:25:39 -0400 Subject: [PATCH 1101/1112] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2440d522eba..8e2a9d7b886 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -588,7 +588,7 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t List Featured Collections for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Lists the aliases of the featured collections of a given Dataverse collection identified by ``id``: +The response is a JSON array of the alias strings of the featured collections of a given Dataverse collection identified by ``id``: .. code-block:: bash From dbc9681f381353ae5bc4bf700997198eb106d97a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 11 Apr 2024 06:33:20 -0400 Subject: [PATCH 1102/1112] Remove visible quote marks --- scripts/api/data/metadatablocks/computational_workflow.tsv | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/api/data/metadatablocks/computational_workflow.tsv b/scripts/api/data/metadatablocks/computational_workflow.tsv index 17e1ec48925..f3a9b2cce5d 100644 --- a/scripts/api/data/metadatablocks/computational_workflow.tsv +++ b/scripts/api/data/metadatablocks/computational_workflow.tsv @@ -2,7 +2,7 @@ computationalworkflow Computational Workflow Metadata #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI workflowType Computational Workflow Type The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow - workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... url 1 "#VALUE" FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow + workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... 
url 1 #VALUE FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow #controlledVocabulary DatasetField Value identifier displayOrder workflowType Common Workflow Language (CWL) workflowtype_cwl 1 @@ -18,4 +18,4 @@ workflowType Makefile workflowtype_makefile 11 workflowType Other Python-based workflow workflowtype_otherpython 12 workflowType Other R-based workflow workflowtype_otherrbased 13 - workflowType Other workflowtype_other 100 \ No newline at end of file + workflowType Other workflowtype_other 100 From f4613f1daf3d1ac093a3108dca49c0e7684f8d5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 11 Apr 2024 08:58:22 -0400 Subject: [PATCH 1103/1112] Update scripts/api/data/metadatablocks/computational_workflow.tsv Co-authored-by: Philip Durbin --- scripts/api/data/metadatablocks/computational_workflow.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/api/data/metadatablocks/computational_workflow.tsv b/scripts/api/data/metadatablocks/computational_workflow.tsv index f3a9b2cce5d..3cd0c26a464 100644 --- a/scripts/api/data/metadatablocks/computational_workflow.tsv +++ b/scripts/api/data/metadatablocks/computational_workflow.tsv @@ -2,7 +2,7 @@ computationalworkflow Computational Workflow Metadata #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI workflowType Computational Workflow Type The kind of Computational Workflow, which is designed to compose and execute a series of computational or data manipulation steps in a scientific application text 0 TRUE TRUE TRUE TRUE TRUE FALSE computationalworkflow - workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... url 1 #VALUE FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow + workflowCodeRepository External Code Repository URL A link to the repository where the un-compiled, human readable code and related code is located (e.g. GitHub, GitLab, SVN) https://... 
url 1 #VALUE FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow workflowDocumentation Documentation A link (URL) to the documentation or text describing the Computational Workflow and its use textbox 2 FALSE FALSE TRUE FALSE TRUE FALSE computationalworkflow #controlledVocabulary DatasetField Value identifier displayOrder workflowType Common Workflow Language (CWL) workflowtype_cwl 1 From e22501d5a100fbb3b5d1225358dcae8f9d896ffe Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Thu, 11 Apr 2024 11:59:05 -0400 Subject: [PATCH 1104/1112] adding better cleanup between tests --- .../iq/dataverse/api/HarvestingClientsIT.java | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index ac38cc693df..5ab1eda74df 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -279,11 +279,19 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte // Fail if it hasn't completed in maxWait seconds assertTrue(i < maxWait); - - // TODO(?) use the native Dataverses/Datasets apis to verify that the expected - // datasets have been harvested. This may or may not be necessary, seeing - // how we have already confirmed the number of successfully harvested - // datasets from the control set; somewhat hard to imagine a practical - // situation where that would not be enough (?). + + // cleanup datasets so other tests can run + Response deleteResponse = given() + .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) + .delete(clientApiPath); + clientApiPath = null; + System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); + + i = 0; + maxWait=20; + do { + Thread.sleep(1000L); + searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, adminUserAPIKey); + } while (i++ Date: Thu, 11 Apr 2024 15:05:47 -0400 Subject: [PATCH 1105/1112] refine cleanup --- .../iq/dataverse/api/HarvestingClientsIT.java | 26 ++++++++----------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 5ab1eda74df..fc034e2ad06 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -36,6 +36,7 @@ public class HarvestingClientsIT { private static final String HARVEST_METADATA_FORMAT = "oai_dc"; private static final String ARCHIVE_DESCRIPTION = "RestAssured harvesting client test"; private static final String CONTROL_OAI_SET = "controlTestSet2"; + private static final String CONTROL_OAI_SET_IDS = "doi:10.5072/FK2"; private static final int DATASETS_IN_CONTROL_SET = 8; private static String normalUserAPIKey; private static String adminUserAPIKey; @@ -54,13 +55,22 @@ public static void setUpClass() { } @AfterEach - public void cleanup() { + public void cleanup() throws InterruptedException { if (clientApiPath != null) { Response deleteResponse = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) .delete(clientApiPath); clientApiPath = null; System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); + + int i = 0; + int maxWait = 20; + do { + if 
(UtilIT.search("dsPersistentId:" + CONTROL_OAI_SET_IDS, normalUserAPIKey).prettyPrint().contains("count_in_response\": 0")) { + break; + } + Thread.sleep(1000L); + } while (i++ < maxWait); } } @@ -279,19 +289,5 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte // Fail if it hasn't completed in maxWait seconds assertTrue(i < maxWait); - - // cleanup datasets so other tests can run - Response deleteResponse = given() - .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey) - .delete(clientApiPath); - clientApiPath = null; - System.out.println("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode()); - - i = 0; - maxWait=20; - do { - Thread.sleep(1000L); - searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, adminUserAPIKey); - } while (i++ Date: Thu, 11 Apr 2024 16:32:51 -0400 Subject: [PATCH 1106/1112] refine cleanup --- .../iq/dataverse/api/HarvestingClientsIT.java | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index fc034e2ad06..d25a4f42872 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -1,6 +1,9 @@ package edu.harvard.iq.dataverse.api; +import java.util.ArrayList; +import java.util.List; import java.util.logging.Logger; +import java.util.stream.Collectors; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -36,12 +39,12 @@ public class HarvestingClientsIT { private static final String HARVEST_METADATA_FORMAT = "oai_dc"; private static final String ARCHIVE_DESCRIPTION = "RestAssured harvesting client test"; private static final String CONTROL_OAI_SET = "controlTestSet2"; - private static final String CONTROL_OAI_SET_IDS = "doi:10.5072/FK2"; private static final int DATASETS_IN_CONTROL_SET = 8; private static String normalUserAPIKey; private static String adminUserAPIKey; private static String harvestCollectionAlias; String clientApiPath = null; + List globalIdList = new ArrayList(); @BeforeAll public static void setUpClass() { @@ -65,13 +68,15 @@ public void cleanup() throws InterruptedException { int i = 0; int maxWait = 20; + String query = "dsPersistentId:" + globalIdList.stream().map(s -> "\""+s+"\"").collect(Collectors.joining(",")); do { - if (UtilIT.search("dsPersistentId:" + CONTROL_OAI_SET_IDS, normalUserAPIKey).prettyPrint().contains("count_in_response\": 0")) { + if (UtilIT.search(query, normalUserAPIKey).prettyPrint().contains("count_in_response\": 0")) { break; } Thread.sleep(1000L); } while (i++ < maxWait); } + globalIdList.clear(); } private static void setupUsers() { @@ -282,10 +287,16 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte System.out.println("Waited " + i + " seconds for the harvest to complete."); Response searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, normalUserAPIKey); + searchHarvestedDatasets.then().assertThat().statusCode(OK.getStatusCode()); searchHarvestedDatasets.prettyPrint(); - searchHarvestedDatasets.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data.total_count", equalTo(expectedNumberOfSetsHarvested)); + // Get all global ids for cleanup + JsonPath jsonPath = searchHarvestedDatasets.getBody().jsonPath(); + int sz = jsonPath.getInt("data.items.size()"); + for(int idx = 0; idx < sz; idx++) { + 
globalIdList.add(jsonPath.getString("data.items["+idx+"].global_id")); + } + // verify count after collecting global ids + assertEquals(expectedNumberOfSetsHarvested, jsonPath.getInt("data.total_count")); // Fail if it hasn't completed in maxWait seconds assertTrue(i < maxWait); From 04daca665cf3e08bf858d12d3c69f81b602985d9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 11 Apr 2024 17:59:50 -0400 Subject: [PATCH 1107/1112] missed release note addition --- doc/release-notes/10339-workflow.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/10339-workflow.md b/doc/release-notes/10339-workflow.md index 8998f9794df..90d08dabb1f 100644 --- a/doc/release-notes/10339-workflow.md +++ b/doc/release-notes/10339-workflow.md @@ -1 +1,3 @@ -The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. \ No newline at end of file +The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. + +Release notes should include the usual instructions, for those who have installed this optional block, to update the computational_workflow block. (PR#10441) \ No newline at end of file From b1078ac8b6bcf21b49c7cb2210b23aeef7d82d64 Mon Sep 17 00:00:00 2001 From: Johannes Darms Date: Fri, 12 Apr 2024 13:07:58 +0200 Subject: [PATCH 1108/1112] Added postgresql-client to the list of installed apk packages Added psql to the configbaker images. This allows the config maker to interact with the postgres database used by dataverse. --- modules/container-configbaker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 91bf5a2c875..dae4a3aa272 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -21,7 +21,7 @@ ENV SCRIPT_DIR="/scripts" \ ENV PATH="${PATH}:${SCRIPT_DIR}" \ BOOTSTRAP_DIR="${SCRIPT_DIR}/bootstrap" -ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed" +ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed postgresql-client" RUN true && \ # Install necessary software and tools From 7eb232e7a3ec542041acb8de0c7bb8303851629c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 12 Apr 2024 14:58:09 -0400 Subject: [PATCH 1109/1112] #10242 catch error for missing file --- .../harvard/iq/dataverse/api/Dataverses.java | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 4e6a2edb972..7e5a5e8965c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -871,26 +871,31 @@ public Response getFeaturedDataverses(@Context ContainerRequestContext crc, @Pat */ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) { List dvsFromInput = new LinkedList<>(); - - for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { - - Dataverse dvToBeFeatured = dataverseService.findByAlias(dvAlias.getString()); - if (dvToBeFeatured == null) { - return error(Response.Status.BAD_REQUEST, "Can't find dataverse collection with alias '" + dvAlias + "'"); - } - dvsFromInput.add(dvToBeFeatured); - } + try { + + 
for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) { + Dataverse dvToBeFeatured = dataverseService.findByAlias(dvAlias.getString()); + if (dvToBeFeatured == null) { + return error(Response.Status.BAD_REQUEST, "Can't find dataverse collection with alias '" + dvAlias + "'"); + } + dvsFromInput.add(dvToBeFeatured); + } + + if (dvsFromInput.isEmpty()) { + return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured."); + } + Dataverse dataverse = findDataverseOrDie(dvIdtf); List featuredSource = new ArrayList<>(); List featuredTarget = new ArrayList<>(); featuredSource.addAll(dataverseService.findAllPublishedByOwnerId(dataverse.getId())); featuredSource.addAll(linkingService.findLinkedDataverses(dataverse.getId())); List featuredList = featuredDataverseService.findByDataverseId(dataverse.getId()); - - if(featuredSource.isEmpty()){ - return error(Response.Status.BAD_REQUEST, "There are no collections avaialble to be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); + + if (featuredSource.isEmpty()) { + return error(Response.Status.BAD_REQUEST, "There are no collections avaialble to be featured in Dataverse collection '" + dataverse.getDisplayName() + "'."); } for (DataverseFeaturedDataverse dfd : featuredList) { @@ -914,9 +919,11 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat // by passing null for Facets and DataverseFieldTypeInputLevel, those are not changed execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null)); return ok("Featured Dataverses of dataverse " + dvIdtf + " updated."); - + } catch (WrappedResponse ex) { return ex.getResponse(); + } catch (JsonParsingException jpe){ + return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured."); } } From 03aa898140d7c094079315f19751bc67dfee3831 Mon Sep 17 00:00:00 2001 From: landreev Date: Fri, 12 Apr 2024 15:23:56 -0400 Subject: [PATCH 1110/1112] An extra sec. for indexing --- .../java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index d25a4f42872..340eab161bb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -286,6 +286,8 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte System.out.println("Waited " + i + " seconds for the harvest to complete."); + // Let's give the asynchronous indexing an extra sec. 
to finish: + Thread.sleep(1000L); Response searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, normalUserAPIKey); searchHarvestedDatasets.then().assertThat().statusCode(OK.getStatusCode()); searchHarvestedDatasets.prettyPrint(); From 25826fecd55bd2097413f43e4bdd357a13242377 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 Apr 2024 08:38:55 +0200 Subject: [PATCH 1111/1112] ci: enable Maven cache management #10428 - Create a common cache to draw from when no branch caches exist - Rejuvenate the common cache to ensure it's around using pushes, schedule or manual runs - To save space, we automatically trigger a job on a closed PR to delete any caches for the feature branch just merged --- .github/workflows/maven_cache_management.yml | 101 +++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 .github/workflows/maven_cache_management.yml diff --git a/.github/workflows/maven_cache_management.yml b/.github/workflows/maven_cache_management.yml new file mode 100644 index 00000000000..4223f146dde --- /dev/null +++ b/.github/workflows/maven_cache_management.yml @@ -0,0 +1,101 @@ +name: Maven Cache Management + +on: + # Every push to develop should trigger cache rejuvenation (dependencies might have changed) + push: + branches: + - develop + # According to https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy + # all caches are depleted after 7 days of no access. Make sure we rejuvenate every 7 days to keep it available. + schedule: + - cron: '23 2 * * 0' # Run for 'develop' every Sunday at 02:23 UTC (3:23 CET, 21:23 ET) + # Enable manual cache management + workflow_dispatch: + # Deplete branch caches once a PR is merged + pull_request: + types: + - closed + +env: + COMMON_CACHE_KEY: "dataverse-maven-cache" + COMMON_CACHE_PATH: "~/.m2/repository" + +jobs: + seed: + name: Drop and Re-Seed Local Repository + runs-on: ubuntu-latest + if: ${{ github.event_name != 'pull_request' }} + permissions: + # Write permission needed to delete caches + # See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + actions: write + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Determine Java version from Parent POM + run: echo "JAVA_VERSION=$(grep '' modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" >> ${GITHUB_ENV} + - name: Set up JDK ${{ env.JAVA_VERSION }} + uses: actions/setup-java@v4 + with: + java-version: ${{ env.JAVA_VERSION }} + distribution: temurin + - name: Seed common cache + run: | + mvn -B -f modules/dataverse-parent dependency:go-offline dependency:resolve-plugins + # This non-obvious order is due to the fact that the download via Maven above will take a very long time (7-8 min). + # Jobs should not be left without a cache. Deleting and saving in one go leaves only a small chance for a cache miss. + - name: Drop common cache + run: | + gh extension install actions/gh-actions-cache + echo "🛒 Fetching list of cache keys" + cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -B develop | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "🗑️ Deleting caches..." 
+ for cacheKey in $cacheKeys + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} -B develop --confirm + done + echo "✅ Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Save the common cache + uses: actions/cache@v4 + with: + path: ${{ env.COMMON_CACHE_PATH }} + key: ${{ env.COMMON_CACHE_KEY }} + enableCrossOsArchive: true + + # Let's delete feature branch caches once their PR is merged - we only have 10 GB of space before eviction kicks in + deplete: + name: Deplete feature branch caches + runs-on: ubuntu-latest + if: ${{ github.event_name == 'pull_request' }} + permissions: + # `actions:write` permission is required to delete caches + # See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + actions: write + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Cleanup caches + run: | + gh extension install actions/gh-actions-cache + + BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge + echo "🛒 Fetching list of cache keys" + cacheKeysForPR=$(gh actions-cache list -R ${{ github.repository }} -B $BRANCH | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "🗑️ Deleting caches..." + for cacheKey in $cacheKeysForPR + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} -B $BRANCH --confirm + done + echo "✅ Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 086cc61c225ea441494522a4f939f24f13a68ba3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 16 Apr 2024 16:18:05 +0200 Subject: [PATCH 1112/1112] style(ci): fix wording as suggested by @pdurbin "Deplete" seems harder to understand than just plain ol' "delete". Co-authored-by: Philip Durbin --- .github/workflows/maven_cache_management.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/maven_cache_management.yml b/.github/workflows/maven_cache_management.yml index 4223f146dde..fedf63b7c54 100644 --- a/.github/workflows/maven_cache_management.yml +++ b/.github/workflows/maven_cache_management.yml @@ -6,12 +6,12 @@ on: branches: - develop # According to https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy - # all caches are depleted after 7 days of no access. Make sure we rejuvenate every 7 days to keep it available. + # all caches are deleted after 7 days of no access. Make sure we rejuvenate every 7 days to keep it available. schedule: - cron: '23 2 * * 0' # Run for 'develop' every Sunday at 02:23 UTC (3:23 CET, 21:23 ET) # Enable manual cache management workflow_dispatch: - # Deplete branch caches once a PR is merged + # Delete branch caches once a PR is merged pull_request: types: - closed
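
The branch-cache cleanup that the workflow above runs on closed pull requests can also be performed by hand with the GitHub CLI, using the same gh actions-cache commands. A minimal sketch, assuming gh is authenticated with a token that has the actions:write permission, and treating OWNER/REPO and the PR number as placeholders:

    # Sketch of running the cache maintenance manually with the GitHub CLI.
    # Assumes gh is authenticated with a token that has actions:write;
    # OWNER/REPO and the PR number 1234 are placeholders.
    gh extension install actions/gh-actions-cache   # one-time install

    # List caches kept for the develop branch
    gh actions-cache list -R OWNER/REPO -B develop

    # Delete every cache key recorded for a merged feature branch
    BRANCH=refs/pull/1234/merge
    for cacheKey in $(gh actions-cache list -R OWNER/REPO -B "$BRANCH" | cut -f 1); do
      gh actions-cache delete "$cacheKey" -R OWNER/REPO -B "$BRANCH" --confirm
    done

As in the workflow itself, deletions are immediate and permanent, so listing the keys first is a reasonable precaution.
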