diff --git a/lib/galaxy/managers/quotas.py b/lib/galaxy/managers/quotas.py
index 31e7fcc48e6d..8b91b94a94a8 100644
--- a/lib/galaxy/managers/quotas.py
+++ b/lib/galaxy/managers/quotas.py
@@ -12,7 +12,10 @@
     Union,
 )
 
-from sqlalchemy import select
+from sqlalchemy import (
+    and_,
+    select,
+)
 
 from galaxy import (
     model,
@@ -115,10 +118,10 @@ def _parse_amount(self, amount: str) -> Optional[Union[int, bool]]:
             return False
 
     def rename_quota(self, quota, params) -> str:
-        stmt = select(Quota).where(Quota.name == params.name).limit(1)
+        stmt = select(Quota).where(and_(Quota.name == params.name, Quota.id != quota.id)).limit(1)
         if not params.name:
             raise ActionInputError("Enter a valid name.")
-        elif params.name != quota.name and self.sa_session.scalars(stmt).first():
+        elif self.sa_session.scalars(stmt).first():
             raise ActionInputError("A quota with that name already exists.")
         else:
             old_name = quota.name
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 8623aa442e90..b1bb69b58ea0 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -4114,7 +4114,9 @@ class Dataset(Base, StorableObject, Serializable):
 
     non_ready_states = (states.NEW, states.UPLOAD, states.QUEUED, states.RUNNING, states.SETTING_METADATA)
     ready_states = tuple(set(states.__members__.values()) - set(non_ready_states))
-    valid_input_states = tuple(set(states.__members__.values()) - {states.ERROR, states.DISCARDED})
+    valid_input_states = tuple(
+        set(states.__members__.values()) - {states.ERROR, states.DISCARDED, states.FAILED_METADATA}
+    )
     no_data_states = (states.PAUSED, states.DEFERRED, states.DISCARDED, *non_ready_states)
     terminal_states = (
         states.OK,
@@ -4546,6 +4548,7 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable):
     creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]]
     copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"]
     copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"]
+    implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"]
 
     validated_states = DatasetValidatedState
 
@@ -4873,9 +4876,9 @@ def display_info(self):
     def get_converted_files_by_type(self, file_type):
         for assoc in self.implicitly_converted_datasets:
             if not assoc.deleted and assoc.type == file_type:
-                if assoc.dataset:
-                    return assoc.dataset
-                return assoc.dataset_ldda
+                item = assoc.dataset or assoc.dataset_ldda
+                if not item.deleted and item.state in Dataset.valid_input_states:
+                    return item
         return None
 
     def get_converted_dataset_deps(self, trans, target_ext):
diff --git a/test/integration/test_quota.py b/test/integration/test_quota.py
index 07137786a353..0debde6ac627 100644
--- a/test/integration/test_quota.py
+++ b/test/integration/test_quota.py
@@ -85,6 +85,26 @@ def test_update(self):
         json_response = show_response.json()
         assert json_response["name"] == new_quota_name
 
+    def test_update_description(self):
+        quota_name = "test-update-quota-description"
+        quota = self._create_quota_with_name(quota_name)
+        quota_id = quota["id"]
+
+        # update the description (a name needs to be specified even if it should not be changed)
+        quota_description = "description of test-updated-quota-name"
+        update_payload = {
+            "name": quota_name,
+            "description": quota_description,
+        }
+        put_response = self._put(f"quotas/{quota_id}", data=update_payload, json=True)
+        put_response.raise_for_status()
+
+        show_response = self._get(f"quotas/{quota_id}")
+        show_response.raise_for_status()
+        json_response = show_response.json()
+        assert json_response["name"] == quota_name
+        assert json_response["description"] == quota_description
+
     def test_delete(self):
         quota_name = "test-delete-quota"
         quota = self._create_quota_with_name(quota_name)
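
The rename_quota() change above moves the "does another quota already use this name" check into the query itself (Quota.id != quota.id), so re-submitting a quota's current name no longer raises "A quota with that name already exists." Below is a minimal, self-contained sketch of that SQLAlchemy pattern; the Quota model, table, and sample rows here are hypothetical stand-ins for illustration, not Galaxy's actual mapping.

# Sketch only: a stand-in Quota model, not Galaxy's mapping.
from sqlalchemy import Column, Integer, String, and_, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Quota(Base):
    __tablename__ = "quota"
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Quota(id=1, name="default"), Quota(id=2, name="staff")])
    session.commit()

    quota = session.get(Quota, 1)
    new_name = "default"  # renaming a quota to its current name should be allowed

    # Same shape as the patched check: look for *another* quota with that name.
    stmt = select(Quota).where(and_(Quota.name == new_name, Quota.id != quota.id)).limit(1)
    assert session.scalars(stmt).first() is None  # no conflict, so the rename proceeds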