Merge branch 'release_24.1' into dev
mvdbeek committed Sep 19, 2024
2 parents 83a63e7 + e36e66a commit a608f9f
Showing 3 changed files with 33 additions and 7 deletions.
9 changes: 6 additions & 3 deletions lib/galaxy/managers/quotas.py
@@ -12,7 +12,10 @@
     Union,
 )
 
-from sqlalchemy import select
+from sqlalchemy import (
+    and_,
+    select,
+)
 
 from galaxy import (
     model,
@@ -115,10 +118,10 @@ def _parse_amount(self, amount: str) -> Optional[Union[int, bool]]:
         return False
 
     def rename_quota(self, quota, params) -> str:
-        stmt = select(Quota).where(Quota.name == params.name).limit(1)
+        stmt = select(Quota).where(and_(Quota.name == params.name, Quota.id != quota.id)).limit(1)
         if not params.name:
             raise ActionInputError("Enter a valid name.")
-        elif params.name != quota.name and self.sa_session.scalars(stmt).first():
+        elif self.sa_session.scalars(stmt).first():
             raise ActionInputError("A quota with that name already exists.")
         else:
             old_name = quota.name
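The effect of the rename_quota change, in isolation: the duplicate-name lookup now excludes the quota being edited by id, rather than relying on a name comparison in the elif. A minimal sketch of that pattern, assuming a SQLAlchemy 2.x session and a mapped model with id and name columns (the function and argument names below are illustrative, not Galaxy's):

from sqlalchemy import and_, select

def name_taken_by_other(session, model_cls, name, current_id):
    # True only if a *different* row already uses this name; the row being
    # edited (current_id) can re-submit its own name without tripping the check.
    stmt = select(model_cls).where(and_(model_cls.name == name, model_cls.id != current_id)).limit(1)
    return session.scalars(stmt).first() is not None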
11 changes: 7 additions & 4 deletions lib/galaxy/model/__init__.py
@@ -4114,7 +4114,9 @@ class Dataset(Base, StorableObject, Serializable):
 
     non_ready_states = (states.NEW, states.UPLOAD, states.QUEUED, states.RUNNING, states.SETTING_METADATA)
     ready_states = tuple(set(states.__members__.values()) - set(non_ready_states))
-    valid_input_states = tuple(set(states.__members__.values()) - {states.ERROR, states.DISCARDED})
+    valid_input_states = tuple(
+        set(states.__members__.values()) - {states.ERROR, states.DISCARDED, states.FAILED_METADATA}
+    )
     no_data_states = (states.PAUSED, states.DEFERRED, states.DISCARDED, *non_ready_states)
     terminal_states = (
         states.OK,
@@ -4546,6 +4548,7 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable):
     creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]]
     copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"]
     copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"]
+    implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"]
 
     validated_states = DatasetValidatedState
 
@@ -4873,9 +4876,9 @@ def display_info(self):
     def get_converted_files_by_type(self, file_type):
         for assoc in self.implicitly_converted_datasets:
             if not assoc.deleted and assoc.type == file_type:
-                if assoc.dataset:
-                    return assoc.dataset
-                return assoc.dataset_ldda
+                item = assoc.dataset or assoc.dataset_ldda
+                if not item.deleted and item.state in Dataset.valid_input_states:
+                    return item
         return None
 
     def get_converted_dataset_deps(self, trans, target_ext):
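Two related adjustments here: FAILED_METADATA joins the states excluded from Dataset.valid_input_states, and get_converted_files_by_type only returns a previously converted dataset if that dataset is itself non-deleted and in a valid input state; otherwise it falls through to None so the caller can redo the conversion. A small sketch of the state filtering, using an illustrative subset of states rather than Galaxy's full enum:

from enum import Enum

class States(str, Enum):
    # illustrative subset of Dataset.states
    NEW = "new"
    QUEUED = "queued"
    RUNNING = "running"
    OK = "ok"
    ERROR = "error"
    DISCARDED = "discarded"
    FAILED_METADATA = "failed_metadata"

# After this commit, failed-metadata datasets no longer count as valid inputs.
VALID_INPUT_STATES = tuple(
    set(States.__members__.values()) - {States.ERROR, States.DISCARDED, States.FAILED_METADATA}
)

def usable(item) -> bool:
    # The check get_converted_files_by_type now applies to a converted dataset
    # before returning it (item is an HDA/LDDA-like object with .deleted and .state).
    return item is not None and not item.deleted and item.state in VALID_INPUT_STATES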
20 changes: 20 additions & 0 deletions test/integration/test_quota.py
@@ -85,6 +85,26 @@ def test_update(self):
         json_response = show_response.json()
         assert json_response["name"] == new_quota_name
 
+    def test_update_description(self):
+        quota_name = "test-update-quota-description"
+        quota = self._create_quota_with_name(quota_name)
+        quota_id = quota["id"]
+
+        # update the description (a name must still be provided even if it should not change)
+        quota_description = "description of test-updated-quota-name"
+        update_payload = {
+            "name": quota_name,
+            "description": quota_description,
+        }
+        put_response = self._put(f"quotas/{quota_id}", data=update_payload, json=True)
+        put_response.raise_for_status()
+
+        show_response = self._get(f"quotas/{quota_id}")
+        show_response.raise_for_status()
+        json_response = show_response.json()
+        assert json_response["name"] == quota_name
+        assert json_response["description"] == quota_description
+
     def test_delete(self):
         quota_name = "test-delete-quota"
         quota = self._create_quota_with_name(quota_name)
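The new test drives the update through the test framework's _put/_get helpers. The equivalent calls against a running Galaxy server would look roughly like the sketch below; the URL, API key, and quota id are placeholders, and the /api/quotas endpoints are assumed to require an admin user's key:

import requests

GALAXY_URL = "http://localhost:8080"      # assumption: local Galaxy instance
headers = {"x-api-key": "ADMIN_API_KEY"}  # assumption: an admin user's API key
quota_id = "ENCODED_QUOTA_ID"             # placeholder encoded quota id

payload = {"name": "test-update-quota-description", "description": "description of test-updated-quota-name"}
resp = requests.put(f"{GALAXY_URL}/api/quotas/{quota_id}", json=payload, headers=headers)
resp.raise_for_status()

shown = requests.get(f"{GALAXY_URL}/api/quotas/{quota_id}", headers=headers)
shown.raise_for_status()
assert shown.json()["description"] == "description of test-updated-quota-name"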
