Commit 75c5977

Merge remote-tracking branch 'upstream/release_23.1' into dev
dannon committed Sep 22, 2023
2 parents fc2be5d + 5ff729d commit 75c5977
Showing 8 changed files with 36 additions and 15 deletions.

@@ -184,6 +184,17 @@ function createLegend() {
.attr("fill", "black")
.text((d) => props.labelFormatter(d));
+// Set the width of the SVG to the width of the widest entry
+let maxWidth = 0;
+for (const node of entries.nodes()) {
+    const width = (node as HTMLElement).getBoundingClientRect().width;
+    maxWidth = Math.max(maxWidth, width);
+}
+const svg = container.node()?.closest("svg");
+if (svg) {
+    svg.setAttribute("width", `${maxWidth}`);
+}
return entries;
}
@@ -294,7 +305,7 @@ function setTooltipPosition(mouseX: number, mouseY: number): void {
</script>

<template>
<BCard class="mb-3 mx-3">
<BCard class="mb-3">
<template v-slot:header>
<h3 class="text-center my-1">
<slot name="title">
@@ -337,7 +348,7 @@ function setTooltipPosition(mouseX: number, mouseY: number): void {
}
.bar-chart {
-float: right;
+float: left;
&:deep(svg) {
overflow: visible;
@@ -349,12 +360,9 @@ function setTooltipPosition(mouseX: number, mouseY: number): void {
}
.legend {
-float: left;
+float: right;
height: 400px;
-&:deep(svg) {
-    overflow: visible;
-}
overflow: auto;
&:deep(.legend-item) {
font-size: 14px;

@@ -11,7 +11,7 @@
"name": "abcdef",
"create_time": "2020-07-02T17:33:56.567412",
"populated_state": "new",
"collection_id": 44,
"collection_id": "b887d74393f85b6d",
"hid": 2,
"job_source_id": "3da1fe6d8d16a505",
"type_id": "dataset_collection-ba03619785539f8c",

@@ -35,5 +35,5 @@
"deleted": false,
"populated": true,
"update_time": "2020-06-26T14:22:58.435348",
"collection_id": 23
}
"collection_id": "f09437b8822035f7"
}

@@ -19,7 +19,7 @@
"tags": [],
"hid": 2,
"url": "/api/histories/f7bb1edd6b95db62/contents/dataset_collections/ba03619785539f8c",
"collection_id": 44,
"collection_id": "b887d74393f85b6d",
"job_state_summary": {
"running": 3,
"waiting": 0,
@@ -36,4 +36,4 @@
"new": 0
},
"name": "Unzip on data 1"
-}
+}
6 changes: 6 additions & 0 deletions lib/galaxy/files/__init__.py
@@ -230,13 +230,16 @@ class ConfiguredFileSourcesConfig:
def __init__(
self,
symlink_allowlist=None,
+fetch_url_allowlist=None,
library_import_dir=None,
user_library_import_dir=None,
ftp_upload_dir=None,
ftp_upload_purge=True,
):
symlink_allowlist = symlink_allowlist or []
+fetch_url_allowlist = fetch_url_allowlist or []
self.symlink_allowlist = symlink_allowlist
+self.fetch_url_allowlist = fetch_url_allowlist
self.library_import_dir = library_import_dir
self.user_library_import_dir = user_library_import_dir
self.ftp_upload_dir = ftp_upload_dir
@@ -248,6 +251,7 @@ def from_app_config(config):
# for this component.
kwds = {}
kwds["symlink_allowlist"] = getattr(config, "user_library_import_symlink_allowlist", [])
kwds["fetch_url_allowlist"] = getattr(config, "fetch_url_allowlist", [])
kwds["library_import_dir"] = getattr(config, "library_import_dir", None)
kwds["user_library_import_dir"] = getattr(config, "user_library_import_dir", None)
kwds["ftp_upload_dir"] = getattr(config, "ftp_upload_dir", None)
@@ -257,6 +261,7 @@ def from_app_config(config):
def to_dict(self):
return {
"symlink_allowlist": self.symlink_allowlist,
"fetch_url_allowlist": self.fetch_url_allowlist,
"library_import_dir": self.library_import_dir,
"user_library_import_dir": self.user_library_import_dir,
"ftp_upload_dir": self.ftp_upload_dir,
@@ -267,6 +272,7 @@ def to_dict(self):
def from_dict(as_dict):
return ConfiguredFileSourcesConfig(
symlink_allowlist=as_dict["symlink_allowlist"],
+fetch_url_allowlist=as_dict["fetch_url_allowlist"],
library_import_dir=as_dict["library_import_dir"],
user_library_import_dir=as_dict["user_library_import_dir"],
ftp_upload_dir=as_dict["ftp_upload_dir"],
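
Note on the config change above: a minimal sketch of how the new fetch_url_allowlist keyword would round-trip through ConfiguredFileSourcesConfig, based only on the constructor, to_dict, and from_dict shown in this diff. The allowlist entries below are placeholders; the real entry format comes from Galaxy's fetch_url_allowlist config option and is not visible here.

    # Sketch under the assumptions above; example values are illustrative.
    from galaxy.files import ConfiguredFileSourcesConfig

    config = ConfiguredFileSourcesConfig(
        symlink_allowlist=["/data/shared"],
        fetch_url_allowlist=["192.168.1.10"],
    )

    as_dict = config.to_dict()
    assert as_dict["fetch_url_allowlist"] == ["192.168.1.10"]

    # Rebuild the config from its serialized form, e.g. in another process.
    restored = ConfiguredFileSourcesConfig.from_dict(as_dict)
    assert restored.fetch_url_allowlist == ["192.168.1.10"]
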
8 changes: 7 additions & 1 deletion lib/galaxy/files/sources/http.py
@@ -9,6 +9,7 @@

from typing_extensions import Unpack

+from galaxy.files.uris import validate_non_local
from galaxy.util import (
DEFAULT_SOCKET_TIMEOUT,
get_charset_from_http_headers,
@@ -47,17 +48,22 @@ def __init__(self, **kwd: Unpack[FilesSourceProperties]):
self._url_regex = re.compile(self._url_regex_str)
self._props = props

+@property
+def _allowlist(self):
+    return self._file_sources_config.fetch_url_allowlist

def _realize_to(
self, source_path: str, native_path: str, user_context=None, opts: Optional[FilesSourceOptions] = None
):
props = self._serialization_props(user_context)
extra_props: HTTPFilesSourceProperties = cast(HTTPFilesSourceProperties, opts.extra_props or {} if opts else {})
headers = props.pop("http_headers", {}) or {}
headers.update(extra_props.get("http_headers") or {})

req = urllib.request.Request(source_path, headers=headers)

with urllib.request.urlopen(req, timeout=DEFAULT_SOCKET_TIMEOUT) as page:
+# Verify url post-redirects is still allowlisted
+validate_non_local(page.geturl(), self._allowlist)
f = open(native_path, "wb") # fd will be .close()ed in stream_to_open_named_file
return stream_to_open_named_file(
page, f.fileno(), native_path, source_encoding=get_charset_from_http_headers(page.headers)
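
Note on the http.py change above: urllib follows redirects, so the URL actually opened can differ from source_path; re-checking page.geturl() against the allowlist closes that gap. The sketch below shows the same pattern in isolation; validate_non_local and its two-argument call come from the diff, while the wrapper function name, URL handling, and timeout are illustrative assumptions.

    # Sketch only: fetch_if_still_allowed is a made-up wrapper, not Galaxy API.
    import urllib.request

    from galaxy.files.uris import validate_non_local

    def fetch_if_still_allowed(url, allowlist, timeout=10):
        req = urllib.request.Request(url)
        with urllib.request.urlopen(req, timeout=timeout) as page:
            # page.geturl() is the post-redirect URL; a malicious server could
            # have redirected the request toward an internal address.
            validate_non_local(page.geturl(), allowlist)
            return page.read()
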
1 change: 1 addition & 0 deletions lib/galaxy/managers/hdcas.py
@@ -314,6 +314,7 @@ def add_serializers(self):
"contents_url": self.generate_contents_url,
"job_state_summary": self.serialize_job_state_summary,
"elements_datatypes": self.serialize_elements_datatypes,
"collection_id": self.serialize_id,
}
self.serializers.update(serializers)

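
Note on the hdcas.py change above: registering serialize_id for "collection_id" is what turns the raw database integer into the encoded string id seen in the updated JSON payloads earlier in this diff (44 -> "b887d74393f85b6d"). A generic illustration of the idea follows; encode_id and serialize are stand-ins, not Galaxy's serializer machinery.

    # Illustration only: placeholder encoding, not Galaxy's id scheme.
    from typing import Any, Callable, Dict

    def encode_id(raw_id: int) -> str:
        return format(raw_id, "016x")

    def serialize(item: Dict[str, Any], serializers: Dict[str, Callable[[Any], Any]]) -> Dict[str, Any]:
        # Run each key through its registered serializer; copy other values as-is.
        return {key: serializers.get(key, lambda value: value)(value) for key, value in item.items()}

    print(serialize({"hid": 2, "collection_id": 44}, {"collection_id": encode_id}))
    # collection_id comes out as a string instead of the integer 44
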
4 changes: 2 additions & 2 deletions lib/galaxy/managers/jobs.py
@@ -420,7 +420,7 @@ def replace_dataset_ids(path, key, value):
c = aliased(model.HistoryDatasetAssociation)
d = aliased(model.JobParameter)
e = aliased(model.HistoryDatasetAssociationHistory)
-query.add_columns(a.dataset_id)
+query = query.add_columns(a.dataset_id)
used_ids.append(a.dataset_id)
query = query.join(a, a.job_id == model.Job.id)
stmt = select(model.HistoryDatasetAssociation.id).where(
@@ -513,7 +513,7 @@ def replace_dataset_ids(path, key, value):
e = aliased(model.HistoryDatasetAssociation)
query = query.add_columns(a.dataset_collection_element_id)
query = (
-query.join(a)
+query.join(a, a.job_id == model.Job.id)
.join(b, b.id == a.dataset_collection_element_id)
.join(
c,
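
Note on the jobs.py changes above: SQLAlchemy's add_columns() (like join()) is generative - it returns a new query and leaves the receiver untouched - so the old unassigned call silently failed to add a.dataset_id to the query; the fix reassigns the result. The second hunk makes the join's ON clause explicit rather than inferred. A standalone illustration with toy tables (not Galaxy's models) follows.

    # Illustration only: toy tables standing in for Galaxy's job/input-dataset models.
    from sqlalchemy import Column, Integer, MetaData, Table, select

    metadata = MetaData()
    job = Table("job", metadata, Column("id", Integer, primary_key=True))
    job_to_input = Table(
        "job_to_input_dataset",
        metadata,
        Column("job_id", Integer),
        Column("dataset_id", Integer),
    )

    stmt = select(job.c.id)
    stmt.add_columns(job_to_input.c.dataset_id)         # result discarded: stmt still selects only job.id
    stmt = stmt.add_columns(job_to_input.c.dataset_id)  # reassigned: dataset_id is now in the SELECT list
    stmt = stmt.join(job_to_input, job_to_input.c.job_id == job.c.id)  # explicit ON clause, as in the fix
    print(stmt)
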
