diff --git a/src/main/java/org/dataone/hashstore/HashStore.java b/src/main/java/org/dataone/hashstore/HashStore.java index 38ed45e7..9a127b80 100644 --- a/src/main/java/org/dataone/hashstore/HashStore.java +++ b/src/main/java/org/dataone/hashstore/HashStore.java @@ -1,6 +1,5 @@ package org.dataone.hashstore; -import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; @@ -16,138 +15,149 @@ * must implement the HashStore interface to ensure proper usage of the system. */ public interface HashStore { - /** - * The `storeObject` method is responsible for the atomic storage of objects to - * disk using a given InputStream and a persistent identifier (pid). Upon - * successful storage, the method returns a HashAddress object containing - * relevant file information, such as the file's id, relative path, absolute - * path, duplicate object status, and hex digest map of algorithms and - * checksums. `storeObject` also ensures that an object is stored only once by - * synchronizing multiple calls and rejecting calls to store duplicate objects. - * - * The file's id is determined by calculating the SHA-256 hex digest of the - * provided pid, which is also used as the permanent address of the file. The - * file's identifier is then sharded using a depth of 3 and width of 2, - * delimited by '/' and concatenated to produce the final permanent address - * and is stored in the `/[...storeDirectory]/objects/` directory. - * - * By default, the hex digest map includes the following hash algorithms: MD5, - * SHA-1, SHA-256, SHA-384 and SHA-512, which are the most commonly used - * algorithms in dataset submissions to DataONE and the Arctic Data Center. If - * an additional algorithm is provided, the `storeObject` method checks if it is - * supported and adds it to the map along with its corresponding hex digest. 
An - * algorithm is considered "supported" if it is recognized as a valid hash - * algorithm in the `java.security.MessageDigest` class. - * - * Similarly, if a checksum and a checksumAlgorithm value are provided, - * `storeObject` validates the object to ensure it matches what is provided - * before moving the file to its permanent address. - * - * @param object Input stream to file - * @param pid Authority-based identifier - * @param additionalAlgorithm Additional hex digest to include in hexDigests - * @param checksum Value of checksum to validate against - * @param checksumAlgorithm Algorithm of checksum submitted - * @return HashAddress object encapsulating file information - * @throws NoSuchAlgorithmException When additionalAlgorithm or - * checksumAlgorithm is invalid - * @throws IOException I/O Error when writing file, generating - * checksums and/or moving file - * @throws PidObjectExistsException When duplicate pid object is found - * @throws RuntimeException Thrown when there is an issue with - * permissions, illegal arguments (ex. - * empty pid) or null pointers - */ - HashAddress storeObject(InputStream object, String pid, String additionalAlgorithm, String checksum, - String checksumAlgorithm) - throws NoSuchAlgorithmException, IOException, PidObjectExistsException, RuntimeException; + /** + * The `storeObject` method is responsible for the atomic storage of objects to + * disk using a given InputStream and a persistent identifier (pid). Upon + * successful storage, the method returns a HashAddress object containing + * relevant file information, such as the file's id, relative path, absolute + * path, duplicate object status, and hex digest map of algorithms and + * checksums. `storeObject` also ensures that an object is stored only once by + * synchronizing multiple calls and rejecting calls to store duplicate objects. 
+ * + * The file's id is determined by calculating the SHA-256 hex digest of the + * provided pid, which is also used as the permanent address of the file. The + * file's identifier is then sharded using a depth of 3 and width of 2, + * delimited by '/' and concatenated to produce the final permanent address + * and is stored in the `/[...storeDirectory]/objects/` directory. + * + * By default, the hex digest map includes the following hash algorithms: MD5, + * SHA-1, SHA-256, SHA-384 and SHA-512, which are the most commonly used + * algorithms in dataset submissions to DataONE and the Arctic Data Center. If + * an additional algorithm is provided, the `storeObject` method checks if it is + * supported and adds it to the map along with its corresponding hex digest. An + * algorithm is considered "supported" if it is recognized as a valid hash + * algorithm in the `java.security.MessageDigest` class. + * + * Similarly, if a checksum and a checksumAlgorithm value are provided, + * `storeObject` validates the object to ensure it matches what is provided + * before moving the file to its permanent address. + * + * @param object Input stream to file + * @param pid Authority-based identifier + * @param additionalAlgorithm Additional hex digest to include in hexDigests + * @param checksum Value of checksum to validate against + * @param checksumAlgorithm Algorithm of checksum submitted + * @return HashAddress object encapsulating file information + * @throws NoSuchAlgorithmException When additionalAlgorithm or + * checksumAlgorithm is invalid + * @throws IOException I/O Error when writing file, generating + * checksums and/or moving file + * @throws PidObjectExistsException When duplicate pid object is found + * @throws RuntimeException Thrown when there is an issue with + * permissions, illegal arguments (ex. 
+ * empty pid) or null pointers + */ + HashAddress storeObject(InputStream object, String pid, String additionalAlgorithm, String checksum, + String checksumAlgorithm) + throws NoSuchAlgorithmException, IOException, PidObjectExistsException, RuntimeException; - /** - * The `storeMetadata` method is responsible for adding/updating metadata - * (ex. `sysmeta`) to disk using a given InputStream, a persistent identifier - * (pid) and metadata format (formatId). The metadata object consists of a - * header and body portion. The header is formed by writing the namespace/format - * (utf-8) of the metadata document followed by a null character `\u0000` and - * the body (metadata content) follows immediately after. - * - * The permanent address of the metadata document is determined by calculating - * the SHA-256 hex digest of the provided `pid` + `format_id`; and the body - * contains the metadata content (ex. `sysmeta`). - * - * Upon successful storage of metadata, `store_metadata` returns a string that - * represents the file's permanent address. Lastly, the metadata objects are - * stored in parallel to objects in the `/store_directory/metadata/` directory. 
- * - * @param metadata Input stream to metadata document - * @param pid Authority-based identifier - * @param formatId Metadata namespace/format - * @return Metadata content identifier (string representing metadata address) - * @throws IOException When there is an error writing the metadata - * document - * @throws IllegalArgumentException Invalid values like null for metadata, or - * empty pids and formatIds - * @throws FileNotFoundException When temp metadata file is not found - * @throws InterruptedException metadataLockedIds synchronization issue - * @throws NoSuchAlgorithmException Algorithm used to calculate permanent - * address is not supported - */ - String storeMetadata(InputStream metadata, String pid, String formatId) - throws IOException, IllegalArgumentException, FileNotFoundException, InterruptedException, - NoSuchAlgorithmException; + /** + * The `storeMetadata` method is responsible for adding/updating metadata + * (ex. `sysmeta`) to disk using a given InputStream, a persistent identifier + * (pid) and metadata format (formatId). The metadata object contains solely the + * given metadata content. + * + * The permanent address of the metadata document is determined by calculating + * the SHA-256 hex digest of the provided `pid` + `format_id`; and the body + * contains the metadata content (ex. `sysmeta`). + * + * Upon successful storage of metadata, `storeMetadata` returns a string that + * represents the path of the file's permanent address, as described above. + * Lastly, the metadata objects are stored in parallel to objects in the + * `/store_directory/metadata/` directory. 
+ * + * @param metadata Input stream to metadata document + * @param pid Authority-based identifier + * @param formatId Metadata namespace/format + * @return Metadata content identifier (string representing metadata address) + * @throws IOException When there is an error writing the metadata + * document + * @throws IllegalArgumentException Invalid values like null for metadata, or + * empty pids and formatIds + * @throws FileNotFoundException When temp metadata file is not found + * @throws InterruptedException metadataLockedIds synchronization issue + * @throws NoSuchAlgorithmException Algorithm used to calculate permanent + * address is not supported + */ + String storeMetadata(InputStream metadata, String pid, String formatId) + throws IOException, IllegalArgumentException, FileNotFoundException, InterruptedException, + NoSuchAlgorithmException; - /** - * The `retrieveObject` method retrieves an object from disk using a given - * persistent identifier (pid). If the object exists (determined by calculating - * the object's permanent address using the SHA-256 hash of the given pid), the - * method will open and return a buffered object stream ready to read from. - * - * @param pid Authority-based identifier - * @return A buffered stream of the object - * @throws Exception TODO: Add specific exceptions - */ - BufferedReader retrieveObject(String pid) throws Exception; + /** + * The `retrieveObject` method retrieves an object from disk using a given + * persistent identifier (pid). If the object exists (determined by calculating + * the object's permanent address using the SHA-256 hash of the given pid), the + * method will open and return a buffered object stream ready to read from. 
+ * + * @param pid Authority-based identifier + * @return Object InputStream + * @throws IllegalArgumentException When pid is null or empty + * @throws FileNotFoundException When requested pid has no associated object + * @throws IOException I/O error when creating InputStream to + * object + * @throws NoSuchAlgorithmException When algorithm used to calculate object + * address is not supported + */ + InputStream retrieveObject(String pid) + throws IllegalArgumentException, FileNotFoundException, IOException, NoSuchAlgorithmException; - /** - * The 'retrieveMetadata' method retrieves the metadata content of a given pid - * and metadata namespace from disk and returns it in the form of a String. - * - * @param pid Authority-based identifier - * @param formatId Metadata namespace/format - * @return Sysmeta (metadata) document of given pid - * @throws Exception TODO: Add specific exceptions - */ - String retrieveMetadata(String pid, String formatId) throws Exception; + /** + * The 'retrieveMetadata' method retrieves the metadata content of a given pid + * and metadata namespace from disk and returns it as an InputStream. + * + * @param pid Authority-based identifier + * @param formatId Metadata namespace/format + * @return Metadata InputStream + * @throws IllegalArgumentException When pid/formatId is null or empty + * @throws FileNotFoundException When requested pid+formatId has no + * associated object + * @throws IOException I/O error when creating InputStream to + * metadata + * @throws NoSuchAlgorithmException When algorithm used to calculate metadata + * address is not supported + */ + InputStream retrieveMetadata(String pid, String formatId) throws Exception; - /** - * The 'deleteObject' method deletes an object permanently from disk using a - * given persistent identifier.
- * - * @param pid Authority-based identifier - * @return - * @throws Exception TODO: Add specific exceptions - */ - boolean deleteObject(String pid) throws Exception; + /** + * The 'deleteObject' method deletes an object permanently from disk using a + * given persistent identifier. + * + * @param pid Authority-based identifier + * @return + * @throws Exception TODO: Add specific exceptions + */ + boolean deleteObject(String pid) throws Exception; - /** - * The 'deleteMetadata' method deletes a metadata document (ex. `sysmeta`) - * permanently from disk using a given persistent identifier and its respective - * metadata namespace. - * - * @param pid Authority-based identifier - * @param formatId Metadata namespace/format - * @return - * @throws Exception TODO: Add specific exceptions - */ - boolean deleteMetadata(String pid, String formatId) throws Exception; + /** + * The 'deleteMetadata' method deletes a metadata document (ex. `sysmeta`) + * permanently from disk using a given persistent identifier and its respective + * metadata namespace. + * + * @param pid Authority-based identifier + * @param formatId Metadata namespace/format + * @return + * @throws Exception TODO: Add specific exceptions + */ + boolean deleteMetadata(String pid, String formatId) throws Exception; - /** - * The 'getHexDigest' method calculates the hex digest of an object that exists - * in HashStore using a given persistent identifier and hash algorithm. - * - * @param pid Authority-based identifier - * @param algorithm Algorithm of desired hex digest - * @return - * @throws Exception TODO: Add specific exceptions - */ - String getHexDigest(String pid, String algorithm) throws Exception; + /** + * The 'getHexDigest' method calculates the hex digest of an object that exists + * in HashStore using a given persistent identifier and hash algorithm. 
+ * + * @param pid Authority-based identifier + * @param algorithm Algorithm of desired hex digest + * @return + * @throws Exception TODO: Add specific exceptions + */ + String getHexDigest(String pid, String algorithm) throws Exception; } diff --git a/src/main/java/org/dataone/hashstore/filehashstore/FileHashStore.java b/src/main/java/org/dataone/hashstore/filehashstore/FileHashStore.java index 1079d097..7d532861 100644 --- a/src/main/java/org/dataone/hashstore/filehashstore/FileHashStore.java +++ b/src/main/java/org/dataone/hashstore/filehashstore/FileHashStore.java @@ -1,6 +1,5 @@ package org.dataone.hashstore.filehashstore; -import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; @@ -124,8 +123,8 @@ public FileHashStore(HashMap hashstoreProperties) } if (storeDepth <= 0 || storeWidth <= 0) { - String errMsg = "FileHashStore - Depth and width must be greater than 0. Depth: " + storeDepth + ". Width: " - + storeWidth; + String errMsg = "FileHashStore - Depth and width must be greater than 0. Depth: " + storeDepth + + ". 
Width: " + storeWidth; logFileHashStore.fatal(errMsg); throw new IllegalArgumentException(errMsg); } @@ -167,7 +166,7 @@ public FileHashStore(HashMap hashstoreProperties) if (Files.isDirectory(storePath)) { File[] storePathFileList = storePath.toFile().listFiles(); if (storePathFileList == null || storePathFileList.length > 0) { - String errMsg = "FileHashStore - Missing 'hashstore.yaml' but HashStore directories and/or objects found."; + String errMsg = "FileHashStore - Missing 'hashstore.yaml' but directories and/or objects found."; logFileHashStore.fatal(errMsg); throw new IllegalStateException(errMsg); } @@ -257,7 +256,7 @@ protected HashMap getHashStoreYaml(Path storePath) throws IOExce * Write a 'hashstore.yaml' file to this.STORE_ROOT * * @param yamlString Content of the HashStore configuration - * @throws IOException If unable to write `hashtore.yaml` + * @throws IOException If unable to write `hashstore.yaml` */ protected void putHashStoreYaml(String yamlString) throws IOException { Path hashstoreYaml = this.STORE_ROOT.resolve("hashstore.yaml"); @@ -522,15 +521,90 @@ public String storeMetadata(InputStream metadata, String pid, String formatId) } @Override - public BufferedReader retrieveObject(String pid) throws Exception { - // TODO: Implement method - return null; + public InputStream retrieveObject(String pid) + throws IllegalArgumentException, NoSuchAlgorithmException, FileNotFoundException, IOException { + logFileHashStore.debug("FileHashStore.retrieveObject - Called to retrieve object for pid: " + pid); + + if (pid == null || pid.trim().isEmpty()) { + String errMsg = "FileHashStore.retrieveObject - pid cannot be null or empty, pid: " + pid; + logFileHashStore.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + + // Get permanent address of the pid by calculating its sha-256 hex digest + String objectCid = this.getPidHexDigest(pid, OBJECT_STORE_ALGORITHM); + String objShardString = this.getHierarchicalPathString(this.DIRECTORY_DEPTH, 
this.DIRECTORY_WIDTH, + objectCid); + Path objHashAddressPath = this.OBJECT_STORE_DIRECTORY.resolve(objShardString); + + // Check to see if object exists + if (!Files.exists(objHashAddressPath)) { + String errMsg = "FileHashStore.retrieveObject - File does not exist for pid: " + pid + + " with object address: " + objHashAddressPath; + logFileHashStore.warn(errMsg); + throw new FileNotFoundException(errMsg); + } + + // If so, return an input stream for the object + try { + InputStream objectCidInputStream = Files.newInputStream(objHashAddressPath); + logFileHashStore.info("FileHashStore.retrieveObject - Retrieved object for pid: " + pid); + return objectCidInputStream; + + } catch (IOException ioe) { + String errMsg = "FileHashStore.retrieveObject - Unexpected error when creating InputStream for pid: " + + pid + ", IOException: " + ioe.getMessage(); + logFileHashStore.error(errMsg); + throw new IOException(errMsg, ioe); + + } + } @Override - public String retrieveMetadata(String pid, String formatId) throws Exception { - // TODO: Implement method - return null; + public InputStream retrieveMetadata(String pid, String formatId) throws Exception { + logFileHashStore.debug("FileHashStore.retrieveMetadata - Called to retrieve metadata for pid: " + pid + + " with formatId: " + formatId); + + if (pid == null || pid.trim().isEmpty()) { + String errMsg = "FileHashStore.retrieveMetadata - pid cannot be null or empty, pid: " + pid; + logFileHashStore.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + if (formatId == null || formatId.trim().isEmpty()) { + String errMsg = "FileHashStore.retrieveMetadata - formatId cannot be null or empty, formatId: " + formatId; + logFileHashStore.error(errMsg); + throw new IllegalArgumentException(errMsg); + } + + // Get permanent address of the pid by calculating its sha-256 hex digest + String metadataCid = this.getPidHexDigest(pid + formatId, OBJECT_STORE_ALGORITHM); + String metadataShardString = 
this.getHierarchicalPathString(this.DIRECTORY_DEPTH, this.DIRECTORY_WIDTH, + metadataCid); + Path metadataHashAddressPath = this.METADATA_STORE_DIRECTORY.resolve(metadataShardString); + + // Check to see if metadata exists + if (!Files.exists(metadataHashAddressPath)) { + String errMsg = "FileHashStore.retrieveMetadata - Metadata does not exist for pid: " + pid + + " with formatId: " + formatId + ". Metadata address: " + metadataHashAddressPath; + logFileHashStore.warn(errMsg); + throw new FileNotFoundException(errMsg); + } + + // If so, return an input stream for the metadata + try { + InputStream metadataCidInputStream = Files.newInputStream(metadataHashAddressPath); + logFileHashStore.info("FileHashStore.retrieveMetadata - Retrieved metadata for pid: " + pid + + " with formatId: " + formatId); + return metadataCidInputStream; + + } catch (IOException ioe) { + String errMsg = "FileHashStore.retrieveMetadata - Unexpected error when creating InputStream for pid: " + + pid + " with formatId: " + formatId + ". 
IOException: " + ioe.getMessage(); + logFileHashStore.error(errMsg); + throw new IOException(errMsg); + + } } @Override @@ -621,9 +695,9 @@ protected HashAddress putObject(InputStream object, String pid, String additiona boolean requestValidation = this.verifyChecksumParameters(checksum, checksumAlgorithm); // Gather HashAddress elements and prepare object permanent address - String objAuthorityId = this.getPidHexDigest(pid, this.OBJECT_STORE_ALGORITHM); + String objectCid = this.getPidHexDigest(pid, this.OBJECT_STORE_ALGORITHM); String objShardString = this.getHierarchicalPathString(this.DIRECTORY_DEPTH, this.DIRECTORY_WIDTH, - objAuthorityId); + objectCid); Path objHashAddressPath = this.OBJECT_STORE_DIRECTORY.resolve(objShardString); String objHashAddressString = objHashAddressPath.toString(); @@ -644,11 +718,11 @@ protected HashAddress putObject(InputStream object, String pid, String additiona // Validate object if checksum and checksum algorithm is passed if (requestValidation) { logFileHashStore - .info("FileHashStore.putObject - Validating object - checksum and checksumAlgorithm supplied and valid."); + .info("FileHashStore.putObject - Validating object, checksum arguments supplied and valid."); String digestFromHexDigests = hexDigests.get(checksumAlgorithm); if (digestFromHexDigests == null) { - String errMsg = "FileHashStore.putObject - checksum not found in hex digest map when validating object. checksumAlgorithm checked: " - + checksumAlgorithm; + String errMsg = "FileHashStore.putObject - checksum not found in hex digest map when validating object." 
+ + " checksumAlgorithm checked: " + checksumAlgorithm; logFileHashStore.error(errMsg); throw new NoSuchAlgorithmException(errMsg); } @@ -657,12 +731,12 @@ protected HashAddress putObject(InputStream object, String pid, String additiona // Delete tmp File boolean deleteStatus = tmpFile.delete(); if (!deleteStatus) { - String errMsg = "FileHashStore.putObject - Object cannot be validated and failed to delete tmpFile: " + String errMsg = "FileHashStore.putObject - Object cannot be validated, failed to delete tmpFile: " + tmpFile.getName(); logFileHashStore.error(errMsg); throw new IOException(errMsg); } - String errMsg = "FileHashStore.putObject - Checksum supplied does not equal to the calculated hex digest: " + String errMsg = "FileHashStore.putObject - Checksum given is not equal to the calculated hex digest: " + digestFromHexDigests + ". Checksum provided: " + checksum + ". Deleting tmpFile: " + tmpFile.getName(); logFileHashStore.error(errMsg); @@ -677,13 +751,13 @@ protected HashAddress putObject(InputStream object, String pid, String additiona if (Files.exists(objHashAddressPath)) { boolean deleteStatus = tmpFile.delete(); if (!deleteStatus) { - String errMsg = "FileHashStore.putObject - Object is found to be a duplicate after writing tmpFile. Attempted to delete tmpFile but failed: " - + tmpFile.getName(); + String errMsg = "FileHashStore.putObject - Object is found to be a duplicate after writing tmpFile." 
+ + " Attempted to delete tmpFile but failed: " + tmpFile.getName(); logFileHashStore.error(errMsg); throw new IOException(errMsg); } - objAuthorityId = null; + objectCid = null; objShardString = null; objHashAddressString = null; logFileHashStore.info( @@ -700,7 +774,7 @@ protected HashAddress putObject(InputStream object, String pid, String additiona } // Create HashAddress object to return with pertinent data - return new HashAddress(objAuthorityId, objShardString, objHashAddressString, isDuplicate, + return new HashAddress(objectCid, objShardString, objHashAddressString, isDuplicate, hexDigests); } @@ -754,13 +828,13 @@ protected boolean verifyChecksumParameters(String checksum, String checksumAlgor // If checksum is supplied, checksumAlgorithm cannot be empty if (checksum != null && !checksum.trim().isEmpty()) { if (checksumAlgorithm == null) { - String errMsg = "FileHashStore.verifyChecksumParameters - Validation requested but checksumAlgorithm is null."; + String errMsg = "FileHashStore.verifyChecksumParameters - checksumAlgorithm is null."; logFileHashStore.error(errMsg); throw new IllegalArgumentException(errMsg); } if (checksumAlgorithm.trim().isEmpty()) { - String errMsg = "FileHashStore.verifyChecksumParameters - Validation requested but checksumAlgorithm is empty."; + String errMsg = "FileHashStore.verifyChecksumParameters - checksumAlgorithm is empty."; logFileHashStore.error(errMsg); throw new IllegalArgumentException(errMsg); } @@ -772,13 +846,13 @@ protected boolean verifyChecksumParameters(String checksum, String checksumAlgor // Ensure checksum is not null or empty if checksumAlgorithm is supplied in if (requestValidation) { if (checksum == null) { - String errMsg = "FileHashStore.verifyChecksumParameters - Validation requested but checksum is null."; + String errMsg = "FileHashStore.verifyChecksumParameters - checksum is null."; logFileHashStore.error(errMsg); throw new NullPointerException(errMsg); } if (checksum.trim().isEmpty()) { - String 
errMsg = "FileHashStore.verifyChecksumParameters - Validation requested but checksum is empty."; + String errMsg = "FileHashStore.verifyChecksumParameters - checksum is empty."; logFileHashStore.error(errMsg); throw new IllegalArgumentException(errMsg); } @@ -941,14 +1015,14 @@ protected Map writeToTmpFileAndGenerateChecksums(File tmpFile, I MessageDigest sha512 = MessageDigest.getInstance(DefaultHashAlgorithms.SHA_512.name().replace("_", "-")); if (additionalAlgorithm != null) { logFileHashStore.debug( - "FileHashStore.writeToTmpFileAndGenerateChecksums - Adding additional algorithm to hex digest map, algorithm: " - + additionalAlgorithm); + "FileHashStore.writeToTmpFileAndGenerateChecksums - Adding additional algorithm to hex digest map," + + " algorithm: " + additionalAlgorithm); additionalAlgo = MessageDigest.getInstance(additionalAlgorithm); } if (checksumAlgorithm != null && !checksumAlgorithm.equals(additionalAlgorithm)) { logFileHashStore.debug( - "FileHashStore.writeToTmpFileAndGenerateChecksums - Adding checksum algorithm to hex digest map, algorithm: " - + checksumAlgorithm); + "FileHashStore.writeToTmpFileAndGenerateChecksums - Adding checksum algorithm to hex digest map," + + " algorithm: " + checksumAlgorithm); checksumAlgo = MessageDigest.getInstance(checksumAlgorithm); } @@ -1028,7 +1102,8 @@ protected boolean move(File source, File target, String entity) + source + ", to target: " + target); // Validate input parameters if (entity == null) { - String errMsg = "FileHashStore.move - entity cannot be null, must be 'object' for storeObject() or 'metadata' for storeMetadata()"; + String errMsg = "FileHashStore.move - entity cannot be null, must be 'object' for storeObject() or" + + " 'metadata' for storeMetadata()"; logFileHashStore.debug(errMsg); throw new NullPointerException(errMsg); @@ -1056,14 +1131,14 @@ protected boolean move(File source, File target, String entity) Path targetFilePath = target.toPath(); try { Files.move(sourceFilePath, 
targetFilePath, StandardCopyOption.ATOMIC_MOVE); - logFileHashStore.debug("FileHashStore.move - file moved from: " + sourceFilePath + ", to: " - + targetFilePath); + logFileHashStore + .debug("FileHashStore.move - file moved from: " + sourceFilePath + ", to: " + targetFilePath); return true; } catch (AtomicMoveNotSupportedException amnse) { logFileHashStore.error( - "FileHashStore.move - StandardCopyOption.ATOMIC_MOVE failed. AtomicMove is not supported across file systems. Source: " - + source + ". Target: " + target); + "FileHashStore.move - StandardCopyOption.ATOMIC_MOVE failed. AtomicMove is not supported across" + + " file systems. Source: " + source + ". Target: " + target); throw amnse; } catch (IOException ioe) { @@ -1129,7 +1204,7 @@ protected String putMetadata(InputStream metadata, String pid, String formatId) // Store metadata to tmpMetadataFile File tmpMetadataFile = this.generateTmpFile("tmp", this.METADATA_TMP_FILE_DIRECTORY); - boolean tmpMetadataWritten = this.writeToTmpMetadataFile(tmpMetadataFile, metadata, checkedFormatId); + boolean tmpMetadataWritten = this.writeToTmpMetadataFile(tmpMetadataFile, metadata); if (tmpMetadataWritten) { logFileHashStore.debug( "FileHashStore.putObject - tmp metadata file has been written, moving to permanent location: " @@ -1143,29 +1218,21 @@ protected String putMetadata(InputStream metadata, String pid, String formatId) } /** - * Write the given formatId, followed by a null character `\u0000`, and metadata - * content into a file + * Write the supplied metadata content into the given tmpFile * * @param tmpFile File to write into * @param metadataStream Stream of metadata content - * @param formatId Namespace/format of metadata * * @return True if file is written successfully * @throws IOException When an I/O error occurs * @throws FileNotFoundException When given file to write into is not found */ - protected boolean writeToTmpMetadataFile(File tmpFile, InputStream metadataStream, String formatId) + protected 
boolean writeToTmpMetadataFile(File tmpFile, InputStream metadataStream) throws IOException, FileNotFoundException { FileOutputStream os = new FileOutputStream(tmpFile); try { - // Write formatId - byte[] metadataHeaderBytes = formatId.getBytes(StandardCharsets.UTF_8); - os.write(metadataHeaderBytes); - // Followed by null character - os.write('\u0000'); - - // Write metadata content (body) + // Write metadata content byte[] buffer = new byte[8192]; int bytesRead; while ((bytesRead = metadataStream.read(buffer)) != -1) { diff --git a/src/test/java/org/dataone/hashstore/HashStoreTest.java b/src/test/java/org/dataone/hashstore/HashStoreTest.java index 5b7bcde3..08b2292b 100644 --- a/src/test/java/org/dataone/hashstore/HashStoreTest.java +++ b/src/test/java/org/dataone/hashstore/HashStoreTest.java @@ -96,8 +96,8 @@ public void hashStore_storeObjects() throws Exception { InputStream dataStream = Files.newInputStream(testDataFile); HashAddress objInfo = hashStore.storeObject(dataStream, pid, null, null, null); - // Check id (sha-256 hex digest of the ab_id, aka s_cid) - String objAuthorityId = testData.pidData.get(pid).get("s_cid"); + // Check id (sha-256 hex digest of the ab_id, aka object_cid) + String objAuthorityId = testData.pidData.get(pid).get("object_cid"); assertEquals(objAuthorityId, objInfo.getId()); assertTrue(Files.exists(Paths.get(objInfo.getAbsPath()))); } diff --git a/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreInterfaceTest.java b/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreInterfaceTest.java index 6e2b3405..29bd059f 100644 --- a/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreInterfaceTest.java +++ b/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreInterfaceTest.java @@ -1,10 +1,12 @@ package org.dataone.hashstore.filehashstore; import java.io.File; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import 
java.nio.file.Path; +import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; @@ -14,10 +16,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Stream; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import javax.xml.bind.DatatypeConverter; import org.dataone.hashstore.HashAddress; import org.dataone.hashstore.exceptions.PidObjectExistsException; @@ -27,6 +26,8 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; +import static org.junit.Assert.*; + /** * Test class for FileHashStore HashStoreInterface override methods */ @@ -77,9 +78,9 @@ public void storeObject() throws Exception { InputStream dataStream = Files.newInputStream(testDataFile); HashAddress objInfo = fileHashStore.storeObject(dataStream, pid, null, null, null); - // Check id (sha-256 hex digest of the ab_id, aka s_cid) - String objAuthorityId = testData.pidData.get(pid).get("s_cid"); - assertEquals(objAuthorityId, objInfo.getId()); + // Check id (sha-256 hex digest of the ab_id (pid)) + String objectCid = testData.pidData.get(pid).get("object_cid"); + assertEquals(objectCid, objInfo.getId()); } } @@ -96,8 +97,8 @@ public void storeObject_relPath() throws Exception { HashAddress objInfo = fileHashStore.storeObject(dataStream, pid, null, null, null); // Check relative path - String objAuthorityId = testData.pidData.get(pid).get("s_cid"); - String objRelPath = fileHashStore.getHierarchicalPathString(3, 2, objAuthorityId); + String objectCid = testData.pidData.get(pid).get("object_cid"); + String objRelPath = fileHashStore.getHierarchicalPathString(3, 2, objectCid); assertEquals(objRelPath, objInfo.getRelPath()); } } @@ -647,4 +648,289 @@ public void storeMetadata_metadataLockedIds() throws Exception { assertEquals(fileCount, 2); } } + + /** + * Check that retrieveObject returns 
an InputStream + */ + @Test + public void retrieveObject() throws Exception { + for (String pid : testData.pidList) { + String pidFormatted = pid.replace("/", "_"); + Path testDataFile = testData.getTestFile(pidFormatted); + + InputStream dataStream = Files.newInputStream(testDataFile); + fileHashStore.storeObject(dataStream, pid, null, null, null); + + // Retrieve object + InputStream objectCidInputStream = fileHashStore.retrieveObject(pid); + assertNotNull(objectCidInputStream); + } + } + + /** + * Check that retrieveObject throws exception when pid is null + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveObject_pidNull() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveObject(null); + pidInputStream.close(); + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveObject throws exception when pid is empty + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveObject_pidEmpty() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveObject(""); + pidInputStream.close(); + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveObject throws exception when pid is empty spaces + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveObject_pidEmptySpaces() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveObject(" "); + pidInputStream.close(); + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveObject throws exception when file is not found + */ + @Test(expected = FileNotFoundException.class) + public void retrieveObject_pidNotFound() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveObject("dou.2023.hs.1"); + pidInputStream.close(); + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveObject InputStream content is correct + */ + @Test + public void retrieveObject_verifyContent() throws Exception { 
+ for (String pid : testData.pidList) { + String pidFormatted = pid.replace("/", "_"); + Path testDataFile = testData.getTestFile(pidFormatted); + + InputStream dataStream = Files.newInputStream(testDataFile); + fileHashStore.storeObject(dataStream, pid, null, null, null); + + // Retrieve object + InputStream objectCidInputStream; + try { + objectCidInputStream = fileHashStore.retrieveObject(pid); + } catch (Exception e) { + e.printStackTrace(); + throw e; + } + + // Read content and compare it to the SHA-256 checksum from TestDataHarness + MessageDigest sha256 = MessageDigest.getInstance("SHA-256"); + try { + byte[] buffer = new byte[8192]; + int bytesRead; + while ((bytesRead = objectCidInputStream.read(buffer)) != -1) { + sha256.update(buffer, 0, bytesRead); + } + } catch (IOException ioe) { + ioe.printStackTrace(); + throw ioe; + } + + // Get hex digest + String sha256Digest = DatatypeConverter.printHexBinary(sha256.digest()).toLowerCase(); + String sha256DigestFromTestData = testData.pidData.get(pid).get("sha256"); + assertEquals(sha256Digest, sha256DigestFromTestData); + + // Close stream + objectCidInputStream.close(); + } + } + + /** + * Check that retrieveMetadata returns an InputStream + */ + @Test + public void retrieveMetadata() throws Exception { + for (String pid : testData.pidList) { + String pidFormatted = pid.replace("/", "_"); + + // Get test metadata file + Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); + + InputStream metadataStream = Files.newInputStream(testMetaDataFile); + fileHashStore.storeMetadata(metadataStream, pid, null); + + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + + InputStream metadataCidInputStream = fileHashStore.retrieveMetadata(pid, storeFormatId); + assertNotNull(metadataCidInputStream); + } + } + + /** + * Check that retrieveMetadata throws exception when pid is null + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveMetadata_pidNull() 
throws Exception { + try { + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + InputStream pidInputStream = fileHashStore.retrieveMetadata(null, storeFormatId); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when pid is empty + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveMetadata_pidEmpty() throws Exception { + try { + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + InputStream pidInputStream = fileHashStore.retrieveMetadata("", storeFormatId); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when pid is empty spaces + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveMetadata_pidEmptySpaces() throws Exception { + try { + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + InputStream pidInputStream = fileHashStore.retrieveMetadata(" ", storeFormatId); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when format is null + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveMetadata_formatNull() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveMetadata("dou.2023.hs.1", null); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when format is empty + */ + @Test(expected = IllegalArgumentException.class) + public void retrieveMetadata_formatEmpty() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveMetadata("dou.2023.hs.1", ""); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when format is empty spaces + */ + @Test(expected = 
IllegalArgumentException.class) + public void retrieveMetadata_formatEmptySpaces() throws Exception { + try { + InputStream pidInputStream = fileHashStore.retrieveMetadata("dou.2023.hs.1", " "); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata throws exception when file is not found + */ + @Test(expected = FileNotFoundException.class) + public void retrieveMetadata_pidNotFound() throws Exception { + try { + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + InputStream pidInputStream = fileHashStore.retrieveMetadata("dou.2023.hs.1", storeFormatId); + pidInputStream.close(); + + } catch (Exception e) { + throw e; + } + } + + /** + * Check that retrieveMetadata InputStream content is correct + */ + @Test + public void retrieveMetadata_verifyContent() throws Exception { + for (String pid : testData.pidList) { + String pidFormatted = pid.replace("/", "_"); + + // Get test metadata file + Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); + + InputStream metadataStream = Files.newInputStream(testMetaDataFile); + fileHashStore.storeMetadata(metadataStream, pid, null); + + String storeFormatId = (String) this.fhsProperties.get("storeMetadataNamespace"); + + // Retrieve object + InputStream metadataCidInputStream; + try { + metadataCidInputStream = fileHashStore.retrieveMetadata(pid, storeFormatId); + } catch (Exception e) { + e.printStackTrace(); + throw e; + } + + // Read content and compare it to the SHA-256 checksum from TestDataHarness + MessageDigest sha256 = MessageDigest.getInstance("SHA-256"); + try { + byte[] buffer = new byte[8192]; + int bytesRead; + while ((bytesRead = metadataCidInputStream.read(buffer)) != -1) { + sha256.update(buffer, 0, bytesRead); + } + } catch (IOException ioe) { + ioe.printStackTrace(); + throw ioe; + } + + // Get hex digest + String sha256MetadataDigest = DatatypeConverter.printHexBinary(sha256.digest()).toLowerCase(); + 
String sha256MetadataDigestFromTestData = testData.pidData.get(pid).get("metadata_sha256"); + assertEquals(sha256MetadataDigest, sha256MetadataDigestFromTestData); + + // Close stream + metadataCidInputStream.close(); + } + } } diff --git a/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreProtectedTest.java b/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreProtectedTest.java index 56c53422..99d6db25 100644 --- a/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreProtectedTest.java +++ b/src/test/java/org/dataone/hashstore/filehashstore/FileHashStoreProtectedTest.java @@ -1,17 +1,17 @@ package org.dataone.hashstore.filehashstore; -import java.io.ByteArrayOutputStream; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; +import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; -import java.util.Scanner; + +import javax.xml.bind.DatatypeConverter; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -155,7 +155,7 @@ public void getHierarchicalPathString() { public void getPidHexDigest() throws Exception { for (String pid : testData.pidList) { String abIdDigest = this.fileHashStore.getPidHexDigest(pid, "SHA-256"); - String abIdTestData = testData.pidData.get(pid).get("s_cid"); + String abIdTestData = testData.pidData.get(pid).get("object_cid"); assertEquals(abIdDigest, abIdTestData); } } @@ -183,8 +183,8 @@ public void putObject_testHarness_id() throws Exception { InputStream dataStream = Files.newInputStream(testDataFile); HashAddress address = fileHashStore.putObject(dataStream, pid, null, null, null); - // Check id (sha-256 hex digest of the ab_id, aka s_cid) - String objAuthorityId = testData.pidData.get(pid).get("s_cid"); + // Check id (sha-256 hex 
digest of the ab_id, aka object_cid) + String objAuthorityId = testData.pidData.get(pid).get("object_cid"); assertEquals(objAuthorityId, address.getId()); } } @@ -203,7 +203,7 @@ public void putObject_testHarness_relPath() throws Exception { HashAddress address = fileHashStore.putObject(dataStream, pid, null, null, null); // Check relative path - String objAuthorityId = testData.pidData.get(pid).get("s_cid"); + String objAuthorityId = testData.pidData.get(pid).get("object_cid"); String objRelPath = fileHashStore.getHierarchicalPathString(3, 2, objAuthorityId); assertEquals(objRelPath, address.getRelPath()); } @@ -768,13 +768,12 @@ public void writeToTmpMetadataFile() throws Exception { for (String pid : testData.pidList) { File newTmpFile = generateTemporaryFile(); String pidFormatted = pid.replace("/", "_"); - String formatId = (String) this.fhsProperties.get("storeMetadataNamespace"); // Get test metadata file Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); InputStream metadataStream = Files.newInputStream(testMetaDataFile); - boolean metadataWritten = this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream, formatId); + boolean metadataWritten = this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream); assertTrue(metadataWritten); } } @@ -790,13 +789,12 @@ public void writeToTmpMetadataFile_tmpFileSize() throws Exception { for (String pid : testData.pidList) { File newTmpFile = generateTemporaryFile(); String pidFormatted = pid.replace("/", "_"); - String formatId = (String) this.fhsProperties.get("storeMetadataNamespace"); // Get test metadata file Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); InputStream metadataStream = Files.newInputStream(testMetaDataFile); - boolean metadataWritten = this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream, formatId); + boolean metadataWritten = this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream); 
assertTrue(metadataWritten); long tmpMetadataFileSize = Files.size(newTmpFile.toPath()); @@ -805,90 +803,48 @@ public void writeToTmpMetadataFile_tmpFileSize() throws Exception { } /** - * Check that tmp metadata written contains correct header - */ - @Test - public void writeToTmpMetadataFile_header() throws Exception { - for (String pid : testData.pidList) { - File newTmpFile = generateTemporaryFile(); - String pidFormatted = pid.replace("/", "_"); - String formatId = (String) this.fhsProperties.get("storeMetadataNamespace"); - - // Get test metadata file - Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); - - InputStream metadataStream = Files.newInputStream(testMetaDataFile); - this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream, formatId); - - // Read the header - FileInputStream metadataInputStream = new FileInputStream(newTmpFile); - try (Scanner scanner = new Scanner(metadataInputStream, "UTF-8").useDelimiter("\u0000")) { - String header = scanner.next(); - assertEquals(header, formatId); - - } catch (IllegalArgumentException iae) { - iae.printStackTrace(); - throw iae; - - } - - } - } - - /** - * Check that tmp metadata written contains correct body. This test uses two - * approaches when reading the metadata file to cross-verify results. 
+ * Check tmp metadata content */ @Test - public void writeToTmpMetadataFile_body() throws Exception { + public void writeToTmpMetadataFile_metadataContent() throws Exception { for (String pid : testData.pidList) { File newTmpFile = generateTemporaryFile(); String pidFormatted = pid.replace("/", "_"); - String formatId = (String) this.fhsProperties.get("storeMetadataNamespace"); // Get test metadata file Path testMetaDataFile = testData.getTestFile(pidFormatted + ".xml"); // Write it to the tmpFile InputStream metadataStream = Files.newInputStream(testMetaDataFile); - this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream, formatId); - - // Confirm header and body - try (FileInputStream metadataInputStream = new FileInputStream(newTmpFile)) { - // Read the metadata content manually - ByteArrayOutputStream headerStream = new ByteArrayOutputStream(); - int currentByte; - // The null character that splits the header/body is consumed in this while loop - while ((currentByte = metadataInputStream.read()) != -1 && currentByte != 0) { - headerStream.write(currentByte); - } - String header = headerStream.toString("UTF-8"); - assertEquals(header, formatId); + this.fileHashStore.writeToTmpMetadataFile(newTmpFile, metadataStream); + + // Create InputStream to tmp File + InputStream metadataStoredStream; + try { + metadataStoredStream = Files.newInputStream(newTmpFile.toPath()); + } catch (Exception e) { + e.printStackTrace(); + throw e; + } - ByteArrayOutputStream bodyStream = new ByteArrayOutputStream(); + // Calculate checksum of metadata content + MessageDigest sha256 = MessageDigest.getInstance("SHA-256"); + try { byte[] buffer = new byte[8192]; int bytesRead; - while ((bytesRead = metadataInputStream.read(buffer)) != -1) { - bodyStream.write(buffer, 0, bytesRead); + while ((bytesRead = metadataStoredStream.read(buffer)) != -1) { + sha256.update(buffer, 0, bytesRead); } - String body = bodyStream.toString("UTF-8"); - - // Now confirm the body matches via 
higher level abstraction class 'Scanner' - InputStream metadataStreamTwo = Files.newInputStream(testMetaDataFile); - try (Scanner scanner = new Scanner(metadataStreamTwo, "UTF-8").useDelimiter("\u0000")) { - String metadataBody = scanner.next(); - assertEquals(metadataBody, body); - - } catch (IllegalArgumentException iae) { - iae.printStackTrace(); - throw iae; - - } - } catch (IOException ioe) { ioe.printStackTrace(); throw ioe; - } + + String sha256Digest = DatatypeConverter.printHexBinary(sha256.digest()).toLowerCase(); + String sha256MetadataDigestFromTestData = testData.pidData.get(pid).get("metadata_sha256"); + assertEquals(sha256Digest, sha256MetadataDigestFromTestData); + + // Close stream + metadataStoredStream.close(); } } } \ No newline at end of file diff --git a/src/test/java/org/dataone/hashstore/testdata/TestDataHarness.java b/src/test/java/org/dataone/hashstore/testdata/TestDataHarness.java index 06682cb6..026469b0 100644 --- a/src/test/java/org/dataone/hashstore/testdata/TestDataHarness.java +++ b/src/test/java/org/dataone/hashstore/testdata/TestDataHarness.java @@ -8,6 +8,12 @@ /* * This class returns the test data expected hex digest values + * + * Notes: + * - "object_cid" is the SHA-256 hash of the pid + * - algorithms without any prefixes are the algorithm hash of the pid's respective data object content + * - "metadata_sha256" is the hash of the pid's respective metadata object content + * */ public class TestDataHarness { public Map> pidData; @@ -18,7 +24,7 @@ public TestDataHarness() { Map> pidsAndHexDigests = new HashMap<>(); Map values1 = new HashMap<>(); - values1.put("s_cid", "0d555ed77052d7e166017f779cbc193357c3a5006ee8b8457230bcf7abcef65e"); + values1.put("object_cid", "0d555ed77052d7e166017f779cbc193357c3a5006ee8b8457230bcf7abcef65e"); values1.put("md2", "b33c730ac5e36b2b886a9cd14552f42e"); values1.put("md5", "db91c910a3202478c8def1071c54aae5"); values1.put("sha1", "1fe86e3c8043afa4c70857ca983d740ad8501ccd"); @@ -28,10 +34,11 @@ 
public TestDataHarness() { values1.put("sha512", "e9bcd6b91b102ef5803d1bd60c7a5d2dbec1a2baf5f62f7da60de07607ad6797d6a9b740d97a257fd2774f2c26503d455d8f2a03a128773477dfa96ab96a2e54"); values1.put("sha512-224", "107f9facb268471de250625440b6c8b7ff8296fbe5d89bed4a61fd35"); + values1.put("metadata_sha256", "158d7e55c36a810d7c14479c952a4d0b370f2b844808f2ea2b20d7df66768b04"); pidsAndHexDigests.put("doi:10.18739/A2901ZH2M", values1); Map values2 = new HashMap<>(); - values2.put("s_cid", "a8241925740d5dcd719596639e780e0a090c9d55a5d0372b0eaf55ed711d4edf"); + values2.put("object_cid", "a8241925740d5dcd719596639e780e0a090c9d55a5d0372b0eaf55ed711d4edf"); values2.put("md2", "9c25df1c8ba1d2e57bb3fd4785878b85"); values2.put("md5", "f4ea2d07db950873462a064937197b0f"); values2.put("sha1", "3d25436c4490b08a2646e283dada5c60e5c0539d"); @@ -41,10 +48,11 @@ public TestDataHarness() { values2.put("sha512", "bf9e7f4d4e66bd082817d87659d1d57c2220c376cd032ed97cadd481cf40d78dd479cbed14d34d98bae8cebc603b40c633d088751f07155a94468aa59e2ad109"); values2.put("sha512-224", "7a2b22e36ced9e91cf8cdf6971897ec4ae21780e11d1c3903011af33"); + values2.put("metadata_sha256", "d87c386943ceaeba5644c52b23111e4f47972e6530df0e6f0f41964b25855b08"); pidsAndHexDigests.put("jtao.1700.1", values2); Map values3 = new HashMap<>(); - values3.put("s_cid", "7f5cc18f0b04e812a3b4c8f686ce34e6fec558804bf61e54b176742a7f6368d6"); + values3.put("object_cid", "7f5cc18f0b04e812a3b4c8f686ce34e6fec558804bf61e54b176742a7f6368d6"); values3.put("md2", "9f2b06b300f661ce4398006c41d8aa88"); values3.put("md5", "e1932fc75ca94de8b64f1d73dc898079"); values3.put("sha1", "c6d2a69a3f5adaf478ba796c114f57b990cf7ad1"); @@ -54,6 +62,7 @@ public TestDataHarness() { values3.put("sha512", "c7fac7e8aacde8546ddb44c640ad127df82830bba6794aea9952f737c13a81d69095865ab3018ed2a807bf9222f80657faf31cfde6c853d7b91e617e148fec76"); values3.put("sha512-224", "e1789a91c9df334fdf6ee5d295932ad96028c426a18b17016a627099"); + values3.put("metadata_sha256", 
"27003e07f2ab374020de73298dd24a1d8b1b57647b8fa3c49db00f8c342afa1d"); pidsAndHexDigests.put("urn:uuid:1b35d0a5-b17a-423b-a2ed-de2b18dc367a", values3); this.pidData = pidsAndHexDigests;