Merge branch 'release_23.1' into dev
mvdbeek committed Jun 28, 2023
2 parents 469ff48 + 9b1c317 commit f887311
Showing 13 changed files with 170 additions and 228 deletions.
121 changes: 63 additions & 58 deletions client/src/components/ToolRecommendation.vue
@@ -86,32 +86,59 @@ export default {
const duration = 750;
const maxTextLength = 20;
const svg = d3.select("#tool-recommendation").append("svg").attr("class", "tree-size").append("g");
const gElem = svg[0][0];
const svgElem = gElem.parentNode;
const svgElem = svg.node().parentElement;
const clientH = svgElem.clientHeight;
const clientW = svgElem.clientWidth;
const translateX = parseInt(clientW * 0.15);
svgElem.setAttribute("viewBox", -translateX + " 0 " + 0.5 * clientW + " " + clientH);
svgElem.setAttribute("preserveAspectRatio", "xMidYMid meet");
const tree = d3.tree().size([clientH, clientW]);
const diagonal = d3.svg.diagonal().projection((d) => {
return [d.y, d.x];
const d3Tree = d3.tree().size([clientH, clientW]);
root = d3.hierarchy(predictedTools, (d) => {
return d.children;
});
root.x0 = parseInt(clientH / 2);
root.y0 = 0;
const collapse = (d) => {
if (d.children) {
d._children = d.children;
d._children.forEach(collapse);
d.children = null;
}
};
root.children.forEach(collapse);
const diagonal = (s, d) => {
const path = `M ${s.y} ${s.x}
C ${(s.y + d.y) / 2} ${s.x},
${(s.y + d.y) / 2} ${d.x},
${d.y} ${d.x}`;
return path;
};
const click = (e, d) => {
if (d.children) {
d._children = d.children;
d.children = null;
} else {
d.children = d._children;
d._children = null;
}
if (d.parent == null) {
update(d);
}
const tId = d.data.id;
if (tId !== undefined && tId !== "undefined" && tId !== null && tId !== "") {
document.location.href = `${getAppRoot()}tool_runner?tool_id=${tId}`;
}
};
const update = (source) => {
// Compute the new tree layout.
const nodes = tree.nodes(root).reverse();
const links = tree.links(nodes);
// Normalize for fixed-depth.
const predictedTools = d3Tree(root);
const nodes = predictedTools.descendants();
const links = predictedTools.descendants().slice(1);
nodes.forEach((d) => {
d.y = d.depth * (clientW / 10);
});
// Update the nodes
const node = svg.selectAll("g.node").data(nodes, (d) => {
return d.id || (d.id = ++i);
});
// Enter any new nodes at the parent's previous position.
const nodeEnter = node
.enter()
.append("g")
@@ -120,97 +147,75 @@
return "translate(" + source.y0 + "," + source.x0 + ")";
})
.on("click", click);
nodeEnter.append("circle").attr("r", 1e-6);
nodeEnter.append("circle").attr("class", "node").attr("r", 1e-6);
nodeEnter
.append("text")
.attr("dy", ".35em")
.attr("x", (d) => {
return d.children || d._children ? -10 : 10;
})
.attr("dy", ".35em")
.attr("text-anchor", (d) => {
return d.children || d._children ? "end" : "start";
})
.text((d) => {
const tName = d.name;
const tName = d.data.name;
if (tName.length > maxTextLength) {
return tName.slice(0, maxTextLength) + "...";
}
return d.name;
return d.data.name;
});
nodeEnter.append("title").text((d) => {
return d.children || d._children ? d.name : "Open tool - " + d.name;
return d.children ? d.data.name : "Open tool - " + d.data.name;
});
// Transition nodes to their new position.
const nodeUpdate = node
const nodeUpdate = nodeEnter.merge(node);
nodeUpdate
.transition()
.duration(duration)
.attr("transform", (d) => {
return "translate(" + d.y + "," + d.x + ")";
});
nodeUpdate.select("circle").attr("r", 2.5);
// Transition exiting nodes to the parent's new position.
node.exit()
nodeUpdate.select("circle.node").attr("r", 2.5);
const nodeExit = node
.exit()
.transition()
.duration(duration)
.attr("transform", (d) => {
return "translate(" + source.y + "," + source.x + ")";
})
.remove();
// Update the links
nodeExit.select("circle").attr("r", 1e-6);
const link = svg.selectAll("path.link").data(links, (d) => {
return d.target.id;
return d.data.id;
});
// Enter any new links at the parent's previous position.
link.enter()
const linkEnter = link
.enter()
.insert("path", "g")
.attr("class", "link")
.attr("d", (d) => {
const o = { x: source.x0, y: source.y0 };
return diagonal({ source: o, target: o });
return diagonal(o, o);
});
const linkUpdate = linkEnter.merge(link);
linkUpdate
.transition()
.duration(duration)
.attr("d", (d) => {
return diagonal(d, d.parent);
});
// Transition links to their new position.
link.transition().duration(duration).attr("d", diagonal);
// Transition exiting nodes to the parent's new position.
link.exit()
.transition()
.duration(duration)
.attr("d", (d) => {
const o = { x: source.x, y: source.y };
return diagonal({ source: o, target: o });
return diagonal(o, o);
})
.remove();
// Stash the old positions for transition.
nodes.forEach((d) => {
d.x0 = d.x;
d.y0 = d.y;
});
};
// Toggle children on click.
const click = (d) => {
if (d.children) {
d._children = d.children;
d.children = null;
} else {
d.children = d._children;
d._children = null;
}
update(d);
const tId = d.id;
if (tId !== undefined && tId !== "undefined" && tId !== null && tId !== "") {
document.location.href = `${getAppRoot()}tool_runner?tool_id=${tId}`;
}
};
const collapse = (d) => {
if (d.children) {
d._children = d.children;
d._children.forEach(collapse);
d.children = null;
}
};
root = predictedTools;
root.x0 = parseInt(clientH / 2);
root.y0 = 0;
root.children.forEach(collapse);
update(root);
},
},
@@ -18,7 +18,7 @@ function onSkipBoolean(value: boolean) {
if (props.step.when && value === false) {
emit("onUpdateStep", { ...props.step, when: undefined });
} else if (value === true && !props.step.when) {
const when = "${inputs.when}";
const when = "$(inputs.when)";
const newStep = {
...props.step,
when,
12 changes: 6 additions & 6 deletions config/plugins/webhooks/news/script.js
@@ -51,12 +51,12 @@

let currentGalaxyVersion = Galaxy.config.version_major;

// TODO/@hexylena: By 21.01 we will have a proper solution for this. For
// now we'll hardcode the version users 'see'. @hexylena will remove this
// code when she writes the user-facing release notes, and then will file
// an issue for how we'll fix this properly.
if (currentGalaxyVersion == "22.01") {
currentGalaxyVersion = "21.09";
// If we're at the 23.1 release candidate, we want to show the 23.0 release notes still.
// This should be the last release using this hack -- new notification
// system will provide notes moving forward

if (currentGalaxyVersion == "23.1" && Galaxy.config.version_minor.startsWith("rc")) {
currentGalaxyVersion = "23.0";
}

const releaseNotes = `https://docs.galaxyproject.org/en/latest/releases/${currentGalaxyVersion}_announce_user.html`;
16 changes: 6 additions & 10 deletions lib/galaxy/tool_util/verify/__init__.py
@@ -51,24 +51,20 @@ def verify(
Throw an informative assertion error if any of these tests fail.
"""
use_default_test_data_resolver = get_filecontent is None
if get_filename is None:
get_filecontent_: Callable[[str], bytes]
if get_filecontent is None:
get_filecontent_ = DEFAULT_TEST_DATA_RESOLVER.get_filecontent
else:
get_filecontent_ = get_filecontent

def get_filename(filename: str) -> str:
file_content = _retrieve_file_content(filename)
file_content = get_filecontent_(filename)
local_name = make_temp_fname(fname=filename)
with open(local_name, "wb") as f:
f.write(file_content)
return local_name

def _retrieve_file_content(filename: str) -> bytes:
if use_default_test_data_resolver:
file_content = DEFAULT_TEST_DATA_RESOLVER.get_filecontent(filename, context=attributes)
else:
assert get_filecontent is not None
file_content = get_filecontent(filename)
return file_content

# Check assertions...
assertions = attributes.get("assert_list", None)
if attributes is not None and assertions is not None:
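The old `_retrieve_file_content` helper re-decided the resolver on every call; the rewrite picks `get_filecontent_` once and closes over it. A minimal, runnable sketch of that closure pattern — Galaxy's `DEFAULT_TEST_DATA_RESOLVER` and `make_temp_fname` are replaced with stand-ins here:

```python
import tempfile
from typing import Callable, Optional


def make_get_filename(get_filecontent: Optional[Callable[[str], bytes]] = None) -> Callable[[str], str]:
    # Pick the content resolver once, instead of re-deciding on every call.
    get_filecontent_: Callable[[str], bytes]
    if get_filecontent is None:
        # Stand-in for DEFAULT_TEST_DATA_RESOLVER.get_filecontent.
        get_filecontent_ = lambda name: b"default content for " + name.encode()
    else:
        get_filecontent_ = get_filecontent

    def get_filename(filename: str) -> str:
        # Materialize the resolved bytes as a local file, as the diff does.
        file_content = get_filecontent_(filename)
        with tempfile.NamedTemporaryFile(delete=False, suffix="_" + filename) as f:
            f.write(file_content)
        return f.name

    return get_filename


# Usage: default resolver vs. a custom fetcher.
print(open(make_get_filename()("a.txt"), "rb").read())
print(open(make_get_filename(lambda n: b"custom")("a.txt"), "rb").read())
```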
82 changes: 71 additions & 11 deletions lib/galaxy/tool_util/verify/interactor.py
@@ -42,6 +42,10 @@
TestCollectionOutputDef,
)
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import (
memory_bound_hexdigest,
parse_checksum_hash,
)
from . import verify
from .asserts import verify_assertions
from .wait import wait_on
@@ -315,7 +319,7 @@ def wait_for_jobs(self, history_id, jobs, maxseconds):

def verify_output_dataset(self, history_id, hda_id, outfile, attributes, tool_id, tool_version=None):
fetcher = self.__dataset_fetcher(history_id)
test_data_downloader = self.__test_data_downloader(tool_id, tool_version)
test_data_downloader = self.__test_data_downloader(tool_id, tool_version, attributes)
verify_hid(
outfile,
hda_id=hda_id,
@@ -478,10 +482,7 @@ def test_data_download(self, tool_id, filename, mode="file", is_output=True, too
local_path = self.test_data_path(tool_id, filename, tool_version=tool_version)

if result is None and (local_path is None or not os.path.exists(local_path)):
for test_data_directory in self.test_data_directories:
local_path = os.path.join(test_data_directory, filename)
if os.path.exists(local_path):
break
local_path = self._find_in_test_data_directories(filename)

if result is None and local_path is not None and os.path.exists(local_path):
if mode == "file":
@@ -503,6 +504,14 @@

return result

def _find_in_test_data_directories(self, filename: str) -> Optional[str]:
local_path = None
for test_data_directory in self.test_data_directories:
local_path = os.path.join(test_data_directory, filename)
if os.path.exists(local_path):
break
return local_path

def __output_id(self, output_data):
# Allow data structure coming out of tools API - {id: <id>, output_name: <name>, etc...}
# or simple id as comes out of workflow API.
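One subtlety in `_find_in_test_data_directories`: when the file exists in none of the directories, the helper still returns the path joined against the last directory rather than None, and the location-based downloader below reuses that non-existing path as a download target. A sketch of that behavior:

```python
import os
from typing import List, Optional


def find_in_test_data_directories(test_data_directories: List[str], filename: str) -> Optional[str]:
    # Returns the first existing match; otherwise the join with the *last*
    # directory (a non-existing path the caller may download into), or None
    # when the directory list is empty.
    local_path = None
    for test_data_directory in test_data_directories:
        local_path = os.path.join(test_data_directory, filename)
        if os.path.exists(local_path):
            break
    return local_path


# Usage sketch (hypothetical directories):
print(find_in_test_data_directories(["test-data", "more-test-data"], "input.fastq"))
```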
@@ -551,15 +560,24 @@ def stage_data_async(
)
name = test_data["name"]
else:
name = os.path.basename(fname)
file_name = None
file_name_exists = False
location = self._ensure_valid_location_in(test_data)
if fname:
file_name = self.test_data_path(tool_id, fname, tool_version=tool_version)
file_name_exists = os.path.exists(f"{file_name}")
upload_from_location = not file_name_exists and location is not None
name = os.path.basename(location if upload_from_location else fname)
tool_input.update(
{
"files_0|NAME": name,
"files_0|type": "upload_dataset",
}
)
files = {}
if force_path_paste:
if upload_from_location:
tool_input.update({"files_0|url_paste": location})
elif force_path_paste:
file_name = self.test_data_path(tool_id, fname, tool_version=tool_version)
tool_input.update({"files_0|url_paste": f"file://{file_name}"})
else:
@@ -584,6 +602,13 @@
assert len(jobs) > 0, f"Invalid response from server [{submit_response}], expecting a job."
return lambda: self.wait_for_job(jobs[0]["id"], history_id, maxseconds=maxseconds)

def _ensure_valid_location_in(self, test_data: dict) -> Optional[str]:
location: Optional[str] = test_data.get("location")
has_valid_location = location and util.is_url(location)
if location and not has_valid_location:
raise ValueError(f"Invalid `location` URL: `{location}`")
return location

def run_tool(self, testdef, history_id, resource_parameters=None) -> RunToolResponse:
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
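Taken together, `stage_data_async` now falls back to pasting a remote `location` URL into the upload tool when no local copy of the test file exists. A hedged sketch of that decision; `is_url` is a rough stand-in for `galaxy.util.is_url`, which the diff relies on, and the paths and URL are hypothetical:

```python
import os
from typing import Optional
from urllib.parse import urlparse


def is_url(value: str) -> bool:
    # Rough stand-in for galaxy.util.is_url.
    return urlparse(value).scheme in ("http", "https", "ftp")


def ensure_valid_location_in(test_data: dict) -> Optional[str]:
    location: Optional[str] = test_data.get("location")
    if location and not is_url(location):
        raise ValueError(f"Invalid `location` URL: `{location}`")
    return location


# Decision made in stage_data_async: prefer an existing local file,
# otherwise paste the remote URL.
test_data = {"location": "https://example.org/data/input.fastq"}  # hypothetical
location = ensure_valid_location_in(test_data)
file_name = "test-data/input.fastq"  # hypothetical local path
upload_from_location = not os.path.exists(file_name) and location is not None
print("files_0|url_paste" if upload_from_location else "files_0|file_data")
```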
@@ -825,11 +850,47 @@ def ensure_user_with_email(self, email, password=None):
test_user = self._post("users", data, key=admin_key).json()
return test_user

def __test_data_downloader(self, tool_id, tool_version=None):
def test_data_download(filename, mode="file"):
def __test_data_downloader(self, tool_id, tool_version=None, attributes: Optional[dict] = None):
location = None
checksum = attributes.get("checksum") if attributes else None

def test_data_download_from_galaxy(filename, mode="file"):
return self.test_data_download(tool_id, filename, mode=mode, tool_version=tool_version)

return test_data_download
def test_data_download_from_location(filename: str):
# try to find the file in the test data directories first
local_path = self._find_in_test_data_directories(filename)
if local_path and os.path.exists(local_path):
with open(local_path, mode="rb") as f:
return f.read()
# if not found, try to download it from the location to the test data directory
# to be reused in subsequent tests
if local_path:
util.download_to_file(location, local_path)
self._verify_checksum(local_path, checksum)
with open(local_path, mode="rb") as f:
return f.read()
# otherwise, download it to a temporary file
with tempfile.NamedTemporaryFile() as file_handle:
util.download_to_file(location, file_handle.name)
self._verify_checksum(file_handle.name, checksum)
return file_handle.file.read()

if attributes:
location = self._ensure_valid_location_in(attributes)
if location:
return test_data_download_from_location
return test_data_download_from_galaxy

def _verify_checksum(self, file_path: str, checksum: Optional[str] = None):
if checksum is None:
return
hash_function, expected_hash_value = parse_checksum_hash(checksum)
calculated_hash_value = memory_bound_hexdigest(hash_func_name=hash_function, path=file_path)
if calculated_hash_value != expected_hash_value:
raise AssertionError(
f"Failed to verify checksum with [{hash_function}] - expected [{expected_hash_value}] got [{calculated_hash_value}]"
)
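
`_verify_checksum` delegates the hashing to `galaxy.util.hash_util`. Below is a self-contained approximation using `hashlib`, assuming a CWL-style `<algorithm>$<hexdigest>` checksum string — the exact format `parse_checksum_hash` accepts is an assumption here:

```python
import hashlib
from typing import Optional


def verify_checksum(file_path: str, checksum: Optional[str] = None) -> None:
    if checksum is None:
        return
    # parse_checksum_hash analogue: split "<algorithm>$<hexdigest>".
    hash_function, expected_hash_value = checksum.split("$", 1)
    # memory_bound_hexdigest analogue: hash the file in fixed-size chunks.
    digest = hashlib.new(hash_function)
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            digest.update(chunk)
    calculated_hash_value = digest.hexdigest()
    if calculated_hash_value != expected_hash_value:
        raise AssertionError(
            f"Failed to verify checksum with [{hash_function}] - "
            f"expected [{expected_hash_value}] got [{calculated_hash_value}]"
        )


# Usage with a hypothetical test-data attribute:
# verify_checksum("test-data/out.tabular", "md5$d41d8cd98f00b204e9800998ecf8427e")
```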

def __dataset_fetcher(self, history_id):
def fetcher(hda_id, base_name=None):
@@ -1695,7 +1756,6 @@ def test_data_iter(required_files):
ftype=extra.get("ftype", DEFAULT_FTYPE),
dbkey=extra.get("dbkey", DEFAULT_DBKEY),
location=extra.get("location", None),
md5=extra.get("md5", None),
)
edit_attributes = extra.get("edit_attributes", [])
