Fixes and tests for data fetch models.
jmchilton committed Sep 18, 2024
1 parent 5acc518 commit 1978497
Showing 5 changed files with 282 additions and 0 deletions.
74 changes: 74 additions & 0 deletions client/src/api/schema/schema.ts
@@ -6330,6 +6330,12 @@ export interface components {
CompositeDataElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -6360,6 +6366,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
@@ -8668,10 +8676,26 @@
} & {
[key: string]: unknown;
};
/** FetchDatasetHash */
FetchDatasetHash: {
/**
* Hash Function
* @enum {string}
*/
hash_function: "MD5" | "SHA-1" | "SHA-256" | "SHA-512";
/** Hash Value */
hash_value: string;
};
/** FileDataElement */
FileDataElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -8701,6 +8725,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
@@ -8966,6 +8992,12 @@
FtpImportElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -8997,6 +9029,8 @@
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Ftp Path */
ftp_path: string;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
@@ -13374,6 +13408,12 @@
NestedElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -13416,6 +13456,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
@@ -14018,6 +14060,12 @@
PastedDataElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -14047,6 +14095,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
@@ -14078,6 +14128,12 @@
PathDataElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -14107,6 +14163,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Link Data Only */
@@ -14799,6 +14857,12 @@
ServerDirElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -14828,6 +14892,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Link Data Only */
@@ -16443,6 +16509,12 @@
UrlDataElement: {
/** Md5 */
MD5?: string | null;
/** Sha-1 */
"SHA-1"?: string | null;
/** Sha-256 */
"SHA-256"?: string | null;
/** Sha-512 */
"SHA-512"?: string | null;
/**
* Auto Decompress
* @description Decompress compressed data before sniffing?
@@ -16472,6 +16544,8 @@
*/
ext: string;
extra_files?: components["schemas"]["ExtraFiles"] | null;
/** Hashes */
hashes?: components["schemas"]["FetchDatasetHash"][] | null;
/** Info */
info?: string | null;
/** Name */
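The same pattern repeats for every fetch element type in the generated client schema: four optional top-level digest fields (MD5, SHA-1, SHA-256, SHA-512) plus an optional hashes list of FetchDatasetHash objects. As a hedged sketch (not part of this commit), a fetch payload exercising the new field might look like the following; the history ID and digest value are illustrative placeholders:

# Sketch of a /api/tools/fetch payload using the new "hashes" field.
# The history_id and hash_value below are illustrative placeholders.
fetch_payload = {
    "history_id": "<history-id>",
    "validate_hashes": True,
    "targets": [
        {
            "destination": {"type": "hdas"},
            "items": [
                {
                    "src": "url",
                    "url": "base64://MSAyIDM=",  # decodes to b"1 2 3"
                    "hashes": [
                        {"hash_function": "SHA-256", "hash_value": "<hex digest>"}
                    ],
                }
            ],
        }
    ],
}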
11 changes: 11 additions & 0 deletions lib/galaxy/schema/fetch_data.py
@@ -101,6 +101,13 @@ class ExtraFiles(FetchBaseModel):
)


class FetchDatasetHash(Model):
hash_function: Literal["MD5", "SHA-1", "SHA-256", "SHA-512"]
hash_value: str

model_config = ConfigDict(extra="forbid")


class BaseDataElement(FetchBaseModel):
name: Optional[CoercedStringType] = None
dbkey: str = Field("?")
@@ -116,6 +123,10 @@ class BaseDataElement(FetchBaseModel):
items_from: Optional[ElementsFromType] = Field(None, alias="elements_from")
collection_type: Optional[str] = None
MD5: Optional[str] = None
SHA1: Optional[str] = Field(None, alias="SHA-1")
SHA256: Optional[str] = Field(None, alias="SHA-256")
SHA512: Optional[str] = Field(None, alias="SHA-512")
hashes: Optional[List[FetchDatasetHash]] = None
description: Optional[str] = None
model_config = ConfigDict(extra="forbid")

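A quick sketch of how the new Pydantic model behaves, assuming it imports from galaxy.schema.fetch_data as defined above; the Literal restricts hash_function to the four supported algorithms, and extra="forbid" rejects unknown keys:

# Sketch exercising FetchDatasetHash; not part of this commit.
from pydantic import ValidationError

from galaxy.schema.fetch_data import FetchDatasetHash

FetchDatasetHash(hash_function="SHA-256", hash_value="abc123")  # accepted

try:
    # Rejected: "CRC32" is not in the Literal of supported algorithms.
    FetchDatasetHash(hash_function="CRC32", hash_value="abc123")
except ValidationError:
    pass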
4 changes: 4 additions & 0 deletions lib/galaxy/tools/data_fetch.py
@@ -250,6 +250,10 @@ def _resolve_item_with_primary(item):
if url:
sources.append(source_dict)
hashes = item.get("hashes", [])
for hash_function in HASH_NAMES:
hash_value = item.get(hash_function)
if hash_value:
hashes.append({"hash_function": hash_function, "hash_value": hash_value})
for hash_dict in hashes:
hash_function = hash_dict.get("hash_function")
hash_value = hash_dict.get("hash_value")
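This loop folds the legacy top-level digest fields into the structured hashes list, so downstream validation only has to handle one shape. A standalone sketch of the same normalization (HASH_NAMES is redefined locally here for illustration; the real constant lives elsewhere in Galaxy):

# Standalone sketch of the hash normalization above.
HASH_NAMES = ("MD5", "SHA-1", "SHA-256", "SHA-512")  # redefined for illustration

def normalize_hashes(item: dict) -> list:
    hashes = list(item.get("hashes") or [])
    for hash_function in HASH_NAMES:
        hash_value = item.get(hash_function)
        if hash_value:
            hashes.append({"hash_function": hash_function, "hash_value": hash_value})
    return hashes

assert normalize_hashes({"SHA-1": "abc"}) == [{"hash_function": "SHA-1", "hash_value": "abc"}]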
61 changes: 61 additions & 0 deletions lib/galaxy_test/api/test_tools_upload.py
@@ -1,6 +1,7 @@
import json
import os
import urllib.parse
from base64 import b64encode

import pytest
from tusclient import client
@@ -25,6 +26,9 @@
)
from ._framework import ApiTestCase

B64_FOR_1_2_3 = b64encode(b"1 2 3").decode("utf-8")
URI_FOR_1_2_3 = f"base64://{B64_FOR_1_2_3}"


class TestToolsUpload(ApiTestCase):
dataset_populator: DatasetPopulator
@@ -927,6 +931,63 @@ def test_upload_and_validate_valid(self):
terminal_validated_state = self.dataset_populator.validate_dataset_and_wait(history_id, dataset_id)
assert terminal_validated_state == "ok", terminal_validated_state

def test_upload_and_validate_hash_valid(self):
with self.dataset_populator.test_history() as history_id:
destination = {"type": "hdas"}
targets = [
{
"destination": destination,
"items": [
{
"src": "url",
"url": URI_FOR_1_2_3,
"hashes": [
{"hash_function": "SHA-1", "hash_value": "65e9d53484d28eef5447bc06fe2d754d1090975a"}
],
},
],
}
]
payload = {
"history_id": history_id,
"targets": targets,
"validate_hashes": True,
}
fetch_response = self.dataset_populator.fetch(payload)
self._assert_status_code_is(fetch_response, 200)
# history ok implies the dataset upload worked
self.dataset_populator.wait_for_history(history_id, assert_ok=True)

def test_upload_and_validate_hash_invalid(self):
with self.dataset_populator.test_history() as history_id:
destination = {"type": "hdas"}
targets = [
{
"destination": destination,
"items": [
{
"src": "url",
"url": URI_FOR_1_2_3,
"hashes": [{"hash_function": "SHA-1", "hash_value": "invalidhash"}],
},
],
}
]
payload = {
"history_id": history_id,
"targets": targets,
"validate_hashes": True,
}
fetch_response = self.dataset_populator.fetch(payload, assert_ok=True, wait=False)
self._assert_status_code_is(fetch_response, 200)
outputs = fetch_response.json()["outputs"]
new_dataset = outputs[0]
self.dataset_populator.wait_for_history(history_id, assert_ok=False)
dataset_details = self.dataset_populator.get_history_dataset_details(
history_id, dataset=new_dataset, assert_ok=False
)
assert dataset_details["state"] == "error"

def _velvet_upload(self, history_id, extra_inputs):
payload = self.dataset_populator.upload_payload(
history_id,
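The constants these tests hard-code can be rederived with the standard library; this snippet (not part of the commit) prints the base64 URI and the SHA-1 digest that the valid-hash test expects:

# Rederive the test constants with only the stdlib.
import hashlib
from base64 import b64encode

content = b"1 2 3"
print(f"base64://{b64encode(content).decode('utf-8')}")  # URI_FOR_1_2_3
print(hashlib.sha1(content).hexdigest())  # expected SHA-1 in the valid test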