diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py index da341fa446c4..de6a3a472f9d 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -1660,7 +1660,7 @@ def _workflow_to_dict_instance(self, trans, stored, workflow, legacy=True): inputs = {} for step in workflow.input_steps: step_type = step.type - step_label = step.label or step.tool_inputs.get("name") + step_label = step.label or step.tool_inputs and step.tool_inputs.get("name") if step_label: label = step_label elif step_type == "data_input": @@ -1954,7 +1954,7 @@ def __set_default_label(self, step, module, state): to the actual `label` attribute which is available for all module types, unique, and mapped to its own database column. """ if not module.label and module.type in ["data_input", "data_collection_input"]: - new_state = safe_loads(state) + new_state = safe_loads(state) or {} default_label = new_state.get("name") if default_label and util.unicodify(default_label).lower() not in [ "input dataset", diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5c25e8960f96..36aee6b45441 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -4544,7 +4544,9 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable): creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]] copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"] copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"] + dependent_jobs: List[JobToInputLibraryDatasetAssociation] implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"] + implicitly_converted_parent_datasets: List["ImplicitlyConvertedDatasetAssociation"] validated_states = DatasetValidatedState diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index a27e8c57b10c..a3bf6a6ec896 100644 --- 
a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -1023,7 +1023,7 @@ def _reassign_hids(self, object_import_tracker: "ObjectImportTracker", history: if object_import_tracker.copy_hid_for: # in an if to avoid flush if unneeded - for from_dataset, to_dataset in object_import_tracker.copy_hid_for.items(): + for from_dataset, to_dataset in object_import_tracker.copy_hid_for: to_dataset.hid = from_dataset.hid self._session_add(to_dataset) self._flush() @@ -1276,18 +1276,24 @@ def _import_implicit_dataset_conversions(self, object_import_tracker: "ObjectImp metadata_safe = False idc = model.ImplicitlyConvertedDatasetAssociation(metadata_safe=metadata_safe, for_import=True) idc.type = idc_attrs["file_type"] - if idc_attrs.get("parent_hda"): - idc.parent_hda = object_import_tracker.hdas_by_key[idc_attrs["parent_hda"]] + # We may not have exported the parent, so only set the parent_hda attribute if we did. + if (parent_hda_id := idc_attrs.get("parent_hda")) and ( + parent_hda := object_import_tracker.hdas_by_key.get(parent_hda_id) + ): + # exports created prior to 24.2 may not have a parent if the parent had been purged + idc.parent_hda = parent_hda if idc_attrs.get("hda"): idc.dataset = object_import_tracker.hdas_by_key[idc_attrs["hda"]] - # we have a the dataset and the parent, lets ensure they land up with the same HID - if idc.dataset and idc.parent_hda and idc.parent_hda in object_import_tracker.requires_hid: + # we have the dataset and the parent, lets ensure they land up with the same HID + if idc.dataset and idc.parent_hda: try: object_import_tracker.requires_hid.remove(idc.dataset) except ValueError: pass # we wanted to remove it anyway. 
- object_import_tracker.copy_hid_for[idc.parent_hda] = idc.dataset + # A HDA can be the parent of multiple implicitly converted datasets, + that's why we use [(source, target)] here + object_import_tracker.copy_hid_for.append((idc.parent_hda, idc.dataset)) self._session_add(idc) @@ -1370,7 +1376,7 @@ class ObjectImportTracker: hdca_copied_from_sinks: Dict[ObjectKeyType, ObjectKeyType] jobs_by_key: Dict[ObjectKeyType, model.Job] requires_hid: List["HistoryItem"] - copy_hid_for: Dict["HistoryItem", "HistoryItem"] + copy_hid_for: List[Tuple["HistoryItem", "HistoryItem"]] def __init__(self) -> None: self.libraries_by_key = {} @@ -1388,7 +1394,7 @@ def __init__(self) -> None: self.implicit_collection_jobs_by_key: Dict[str, ImplicitCollectionJobs] = {} self.workflows_by_key: Dict[str, model.Workflow] = {} self.requires_hid = [] - self.copy_hid_for = {} + self.copy_hid_for = [] self.new_history: Optional[model.History] = None @@ -2301,6 +2307,14 @@ def add_implicit_conversion_dataset( include_files: bool, conversion: model.ImplicitlyConvertedDatasetAssociation, ) -> None: + parent_hda = conversion.parent_hda + if parent_hda and parent_hda not in self.included_datasets: + # We should always include the parent of an implicit conversion + # to avoid holes in the provenance. 
+ self.included_datasets[parent_hda] = (parent_hda, include_files) + grand_parent_association = parent_hda.implicitly_converted_parent_datasets + if grand_parent_association and (grand_parent_hda := grand_parent_association[0].parent_hda): + self.add_implicit_conversion_dataset(grand_parent_hda, include_files, grand_parent_association[0]) self.included_datasets[dataset] = (dataset, include_files) self.dataset_implicit_conversions[dataset] = conversion diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py index 777db9a53bbf..841eea988d49 100644 --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -20,6 +20,7 @@ from galaxy import model from galaxy.exceptions import ( + AuthenticationRequired, ItemAccessibilityException, RequestParameterInvalidException, ) @@ -726,14 +727,6 @@ def handle_output(name, output, hidden=None): # Remap any outputs if this is a rerun and the user chose to continue dependent jobs # This functionality requires tracking jobs in the database. if app.config.track_jobs_in_database and rerun_remap_job_id is not None: - # Need to flush here so that referencing outputs by id works - session = trans.sa_session() - try: - session.expire_on_commit = False - with transaction(session): - session.commit() - finally: - session.expire_on_commit = True self._remap_job_on_rerun( trans=trans, galaxy_session=galaxy_session, @@ -774,7 +767,14 @@ def handle_output(name, output, hidden=None): return job, out_data, history - def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current_job, out_data): + def _remap_job_on_rerun( + self, + trans: ProvidesHistoryContext, + galaxy_session: Optional[model.GalaxySession], + rerun_remap_job_id: int, + current_job: Job, + out_data, + ): """ Re-connect dependent datasets for a job that is being rerun (because it failed initially). 
@@ -782,22 +782,39 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current To be able to resume jobs that depend on this jobs output datasets we change the dependent's job input datasets to be those of the job that is being rerun. """ + old_job = trans.sa_session.get(Job, rerun_remap_job_id) + if not old_job: + # I don't think that can really happen + raise RequestParameterInvalidException("rerun_remap_job_id parameter is invalid") + old_tool = trans.app.toolbox.get_tool(old_job.tool_id, exact=False) + new_tool = trans.app.toolbox.get_tool(current_job.tool_id, exact=False) + if old_tool and new_tool and old_tool.old_id != new_tool.old_id: + # If we currently only have the old or new tool installed we'll find the other tool anyway with `exact=False`. + # If we don't have the tool at all we'll fail anyway, no need to worry here. + raise RequestParameterInvalidException( + f"Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" + ) + if trans.user is not None: + if old_job.user_id != trans.user.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." + ) + elif trans.user is None and galaxy_session: + if old_job.session_id != galaxy_session.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." 
+ ) + else: + raise AuthenticationRequired("Authentication required to remap job dependencies") + # Need to flush here so that referencing outputs by id works + session = trans.sa_session() + try: + session.expire_on_commit = False + with transaction(session): + session.commit() + finally: + session.expire_on_commit = True try: - old_job = trans.sa_session.get(Job, rerun_remap_job_id) - assert old_job is not None, f"({rerun_remap_job_id}/{current_job.id}): Old job id is invalid" - assert ( - old_job.tool_id == current_job.tool_id - ), f"({old_job.id}/{current_job.id}): Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" - if trans.user is not None: - assert ( - old_job.user_id == trans.user.id - ), f"({old_job.id}/{current_job.id}): Old user id ({old_job.user_id}) does not match rerun user id ({trans.user.id})" - elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession): - assert ( - old_job.session_id == galaxy_session.id - ), f"({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})" - else: - raise Exception(f"({old_job.id}/{current_job.id}): Remapping via the API is not (yet) supported") # Start by hiding current job outputs before taking over the old job's (implicit) outputs. current_job.hide_outputs(flush=False) # Duplicate PJAs before remap. 
@@ -819,7 +836,7 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current for jtod in old_job.output_datasets: for job_to_remap, jtid in [(jtid.job, jtid) for jtid in jtod.dataset.dependent_jobs]: if (trans.user is not None and job_to_remap.user_id == trans.user.id) or ( - trans.user is None and job_to_remap.session_id == galaxy_session.id + trans.user is None and galaxy_session and job_to_remap.session_id == galaxy_session.id ): self.__remap_parameters(job_to_remap, jtid, jtod, out_data) trans.sa_session.add(job_to_remap) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index c33183430771..1a099bfe7061 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1468,8 +1468,8 @@ def get_column_list(self, trans, other_values): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): dataset = dataset.to_hda_representative() - if isinstance(dataset, DatasetCollectionElement) and dataset.hda: - dataset = dataset.hda + if isinstance(dataset, DatasetCollectionElement): + dataset = dataset.first_dataset_instance() if isinstance(dataset, HistoryDatasetAssociation) and self.ref_input and self.ref_input.formats: direct_match, target_ext, converted_dataset = dataset.find_conversion_destination( self.ref_input.formats @@ -1561,9 +1561,13 @@ def is_file_empty(self, trans, other_values): for dataset in util.listify(other_values.get(self.data_ref)): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): - dataset = dataset.to_hda_representative() + if dataset.populated: + dataset = dataset.to_hda_representative() + else: + # That's fine, we'll check again on execution + return True if isinstance(dataset, DatasetCollectionElement): - dataset = dataset.hda + dataset = dataset.first_dataset_instance() if isinstance(dataset, DatasetInstance): 
return not dataset.has_data() if is_runtime_value(dataset): diff --git a/lib/galaxy_test/api/test_jobs.py b/lib/galaxy_test/api/test_jobs.py index 82f9ecbab416..c904e808f70f 100644 --- a/lib/galaxy_test/api/test_jobs.py +++ b/lib/galaxy_test/api/test_jobs.py @@ -464,6 +464,32 @@ def test_no_hide_on_rerun(self): assert hdca["visible"] assert isoparse(hdca["update_time"]) > (isoparse(first_update_time)) + def test_rerun_exception_handling(self): + with self.dataset_populator.test_history() as history_id: + other_run_response = self.dataset_populator.run_tool( + tool_id="job_properties", + inputs={}, + history_id=history_id, + ) + unrelated_job_id = other_run_response["jobs"][0]["id"] + run_response = self._run_map_over_error(history_id) + job_id = run_response["jobs"][0]["id"] + self.dataset_populator.wait_for_job(job_id) + failed_hdca = self.dataset_populator.get_history_collection_details( + history_id=history_id, + content_id=run_response["implicit_collections"][0]["id"], + assert_ok=False, + ) + assert failed_hdca["visible"] + rerun_params = self._get(f"jobs/{job_id}/build_for_rerun").json() + inputs = rerun_params["state_inputs"] + inputs["rerun_remap_job_id"] = unrelated_job_id + before_rerun_items = self.dataset_populator.get_history_contents(history_id) + rerun_response = self._run_detect_errors(history_id=history_id, inputs=inputs) + assert "does not match rerun tool id" in rerun_response["err_msg"] + after_rerun_items = self.dataset_populator.get_history_contents(history_id) + assert len(before_rerun_items) == len(after_rerun_items) + @skip_without_tool("empty_output") def test_common_problems(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py index a8f0cb1ebc34..8a29c39aaa2b 100644 --- a/lib/galaxy_test/api/test_tools.py +++ b/lib/galaxy_test/api/test_tools.py @@ -2578,6 +2578,36 @@ def test_implicit_reduce_with_mapping(self): ) assert 
output_hdca["collection_type"] == "list" + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + # FIXME: integers don't work here + "col": "1", + } + response = self._run("column_multi_param", history_id, inputs, assert_ok=True) + self.dataset_populator.wait_for_job(job_id=response["jobs"][0]["id"], assert_ok=True) + + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list_fails_on_invalid_column(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + "col": "10", + } + try: + self._run("column_multi_param", history_id, inputs, assert_ok=True) + except AssertionError as e: + exception_raised = e + assert exception_raised, "Expected invalid column selection to fail job" + @skip_without_tool("column_multi_param") def test_implicit_conversion_and_reduce(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 98dc9cb5215f..82b458d0068e 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -2906,7 +2906,7 @@ def __create_payload(self, history_id: str, *args, **kwds): else: return self.__create_payload_collection(history_id, *args, **kwds) - def __create_payload_fetch(self, history_id: str, collection_type, **kwds): + def __create_payload_fetch(self, history_id: str, collection_type, ext="txt", **kwds): contents = None if "contents" in kwds: contents = kwds["contents"] @@ -2928,7 +2928,7 @@ def __create_payload_fetch(self, history_id: 
str, collection_type, **kwds): elements.append(contents_level) continue - element = {"src": "pasted", "ext": "txt"} + element = {"src": "pasted", "ext": ext} # Else older style list of contents or element ID and contents, # convert to fetch API. if isinstance(contents_level, tuple): diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index 09d4be63a792..ea152aca9bc8 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -9,6 +9,57 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Strip unicode null from tool stdio by `@mvdbeek `_ in `#18527 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Also fail ``ensure_dataset_on_disk`` if dataset is in new state by `@mvdbeek `_ in `#18559 `_ +* Fix sqlalchemy statement in tooltagmanager reset output by `@dannon `_ in `#18591 `_ +* Set minimum weasyprint version by `@mvdbeek `_ in `#18606 `_ +* Improve relabel identifiers message when number of columns is not 2 by `@mvdbeek `_ in `#18634 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fill in missing help for cross product tools. 
by `@jmchilton `_ in `#18698 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix directory get or create logic by `@mvdbeek `_ in `#18752 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Allow to change only the description of a quota by `@bernt-matthias `_ in `#18775 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix unspecified ``oidc_endpoint`` variable overwriting specified ``redirect_url`` by `@bgruening `_ in `#18818 `_ +* Fix wrong celery_app config on job and workflow handlers by `@mvdbeek `_ in `#18819 `_ +* Fix ``named cursor is not valid anymore`` by `@mvdbeek `_ in `#18825 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Skip metric collection if job working directory doesn't exist by `@mvdbeek `_ in `#18845 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ +* Fix loading very old workflows with data inputs by `@mvdbeek `_ in `#18876 `_ + +============ +Enhancements +============ + +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ +* Collect job metrics also when job failed by `@mvdbeek `_ in `#18809 `_ +* prevent "missing refresh_token" errors by supporting also with Keycloak backend by `@ljocha `_ in `#18826 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- 
diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index ae02cb251cda..5d9a1f321ef1 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index 327bc9cea77b..0d24cbb05150 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -9,6 +9,32 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Strip whitespace when listifying admin users by `@jdavcs `_ in `#18656 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ +* More datatype deprecation warnings by `@mvdbeek `_ in `#18612 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ + +============= +Other changes +============= + +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ +* Backport PR 18630 "Add BlobToolkit to the list of interactive tools" to release_24.1 by `@cat-bro `_ in `#18784 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index a50cce52863c..f3b5913f9da8 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -9,6 +9,38 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ +* Also check dataset.deleted when determining if data can be displayed by `@mvdbeek `_ in `#18547 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Fix h5ad metadata by `@nilchia `_ in `#18635 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Make pylibmagic import optional by `@mvdbeek `_ in `#18813 `_ +* Ignore converted datasets in invalid input states by `@mvdbeek `_ in `#18850 `_ +* Fix discovered outputs with directory metadata and distributed object by `@mvdbeek `_ in `#18855 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix wrong final state when init_from is used by `@mvdbeek `_ in `#18871 `_ +* Fix history import when parent_hda not serialized by `@mvdbeek `_ in `#18873 `_ + +============= +Other changes +============= + +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index 356f458d7c5a..8bc4ff5427ab 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index a94919c3e982..2153ce10970b 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -9,6 +9,17 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Retry container monitor POST if it 
fails (don't assume it succeeded) by `@natefoo `_ in `#18863 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 1501175574b6..d6a048415a20 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index 945b382e4c50..65a78836ad82 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index 2174755b4f24..7a28dbaa4557 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -9,6 +9,17 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index d746fe40ad45..3bf84da813cc 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -9,6 +9,28 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Handle error when workflow is unowned in Invocation view by `@ahmedhamidawan `_ in `#18730 `_ +* Fix datatype validation of newly built collection by `@mvdbeek `_ in `#18738 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Fix ``TypeError`` from Pydantic 2.9.0 by `@nsoranzo `_ in `#18788 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Make all fields optional for HelpForumPost by `@davelopez `_ in `#18839 `_ + +============ +Enhancements +============ + +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 7e4d745b7eda..d1b195a7f953 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index fd2142439fec..75536b62bb90 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -9,6 +9,29 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Fix map over calculation for runtime inputs by 
`@mvdbeek `_ in `#18535 `_ +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 02265af284d7..e2fbc283b025 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -9,6 +9,19 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index e314c57caf9e..b568c88d14e3 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) 
------------------- diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index a238e7eb849c..19ef42776f91 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 83c56edb8548..8210bdf311bc 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -9,6 +9,24 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Handle all requests error in ``ApiBiotoolsMetadataSource._raw_get_metadata`` by `@nsoranzo `_ in `#18510 `_ +* xsd: allow `change_format` and `actions` also in statically defined collection elements, and break recursion by `@bernt-matthias `_ in `#18605 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index 17a158839c29..5841b78a1b2a 100644 --- a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index ee1512d490a3..99e03538b67a 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -9,6 +9,24 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ + +============ 
+Enhancements +============ + +* Use smtplib send_message to support utf-8 chars in to and from by `@mvdbeek `_ in `#18805 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index 470b054011de..d60e3b0910f7 100644 --- a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -9,6 +9,44 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Check if dataset has any data before running provider checks by `@mvdbeek `_ in `#18526 `_ +* Raise appropriate exception if ldda not found by `@mvdbeek `_ in `#18569 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ +* Fix resume_paused_jobs if no session provided by `@mvdbeek `_ in `#18640 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Return error when following a link to a non-ready display application by `@mvdbeek `_ in `#18672 `_ +* Only load authnz routes when oidc enabled by `@mvdbeek `_ in `#18683 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix sorting users in admin by last login by `@jdavcs `_ in `#18694 `_ +* Fix resume paused jobs response handling by `@dannon `_ in `#18733 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + +============= +Other changes +============= + +* Fix check dataset check by 
`@mvdbeek `_ in `#18856 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index 6ef26301eff3..4aeb57285e9d 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -9,6 +9,19 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Apply statsd arg sanitization to all pages by `@mvdbeek `_ in `#18509 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index 8271f6fa0196..ddde231f1d27 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py index 80dd3789ef1b..99dab6ddcb9e 100644 --- a/test/unit/data/model/test_model_store.py +++ b/test/unit/data/model/test_model_store.py @@ -122,32 +122,80 @@ def test_import_export_history_allow_discarded_data(): assert imported_job.output_datasets[0].dataset == datasets[1] -def test_import_export_history_with_implicit_conversion(): +def setup_history_with_implicit_conversion(): app = _mock_app() u, h, d1, d2, j = _setup_simple_cat_job(app) + intermediate_ext = "bam" + intermediate_implicit_hda = model.HistoryDatasetAssociation( + extension=intermediate_ext, create_dataset=True, flush=False, history=h + ) + intermediate_implicit_hda.hid = d2.hid convert_ext = "fasta" implicit_hda = model.HistoryDatasetAssociation(extension=convert_ext, create_dataset=True, 
flush=False, history=h) implicit_hda.hid = d2.hid # this adds and flushes the result... - d2.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + intermediate_implicit_hda.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + d2.attach_implicitly_converted_dataset(app.model.context, intermediate_implicit_hda, intermediate_ext) + + app.object_store.update_from_file(intermediate_implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) app.object_store.update_from_file(implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) - assert len(h.active_datasets) == 3 + assert len(h.active_datasets) == 4 + return app, h, implicit_hda + + +def test_import_export_history_with_implicit_conversion(): + app, h, _ = setup_history_with_implicit_conversion() imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) - assert len(imported_history.active_datasets) == 3 + assert len(imported_history.active_datasets) == 4 recovered_hda_2 = imported_history.active_datasets[1] assert recovered_hda_2.implicitly_converted_datasets - imported_conversion = recovered_hda_2.implicitly_converted_datasets[0] - assert imported_conversion.type == "fasta" - assert imported_conversion.dataset == imported_history.active_datasets[2] + intermediate_conversion = recovered_hda_2.implicitly_converted_datasets[0] + assert intermediate_conversion.type == "bam" + intermediate_hda = intermediate_conversion.dataset + assert intermediate_hda.implicitly_converted_datasets + final_conversion = intermediate_hda.implicitly_converted_datasets[0] + + assert final_conversion.type == "fasta" + assert final_conversion.dataset == imported_history.active_datasets[-1] # implicit conversions have the same HID... ensure this property is recovered... 
assert imported_history.active_datasets[2].hid == imported_history.active_datasets[1].hid +def test_import_export_history_with_implicit_conversion_parents_purged(): + app, h, implicit_hda = setup_history_with_implicit_conversion() + # Purge parents + parent = implicit_hda.implicitly_converted_parent_datasets[0].parent_hda + parent.dataset.purged = True + grandparent = parent.implicitly_converted_parent_datasets[0].parent_hda + grandparent.dataset.purged = True + app.model.context.commit() + imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) + + assert len(imported_history.active_datasets) == 2 + assert len(imported_history.datasets) == 4 + imported_implicit_hda = imported_history.active_datasets[1] + assert imported_implicit_hda.extension == "fasta" + + # implicit conversions have the same HID... ensure this property is recovered... + assert imported_implicit_hda.hid == implicit_hda.hid + assert imported_implicit_hda.implicitly_converted_parent_datasets + intermediate_implicit_conversion = imported_implicit_hda.implicitly_converted_parent_datasets[0] + intermediate_hda = intermediate_implicit_conversion.parent_hda + assert intermediate_hda.hid == implicit_hda.hid + assert intermediate_hda.extension == "bam" + assert intermediate_hda.implicitly_converted_datasets + assert intermediate_hda.implicitly_converted_parent_datasets + first_implicit_conversion = intermediate_hda.implicitly_converted_parent_datasets[0] + source_hda = first_implicit_conversion.parent_hda + assert source_hda.hid == implicit_hda.hid + assert source_hda.extension == "txt" + + def test_import_export_history_with_implicit_conversion_and_extra_files(): app = _mock_app()