diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
index 0d622b7461b6..e7abc7c35ee1 100644
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -582,8 +582,6 @@ def __init__(self, configure_logging=True, use_converters=True, use_display_appl
         self._configure_tool_shed_registry()
         self._register_singleton(tool_shed_registry.Registry, self.tool_shed_registry)
-        # Tool Data Tables
-        self._configure_tool_data_tables(from_shed_config=False)
 
     def _configure_tool_shed_registry(self) -> None:
         # Set up the tool sheds registry
@@ -638,6 +636,8 @@ def __init__(self, **kwargs) -> None:
         )
         self.api_keys_manager = self._register_singleton(ApiKeyManager)
+        # Tool Data Tables
+        self._configure_tool_data_tables(from_shed_config=False)
         # Load dbkey / genome build manager
         self._configure_genome_builds(data_table_name="__dbkeys__", load_old_style=True)
diff --git a/lib/galaxy/celery/tasks.py b/lib/galaxy/celery/tasks.py
index 31379c56b40a..f25a81b7cefc 100644
--- a/lib/galaxy/celery/tasks.py
+++ b/lib/galaxy/celery/tasks.py
@@ -38,6 +38,7 @@
 from galaxy.model.scoped_session import galaxy_scoped_session
 from galaxy.objectstore import BaseObjectStore
 from galaxy.objectstore.caching import check_caches
+from galaxy.queue_worker import GalaxyQueueWorker
 from galaxy.schema.tasks import (
     ComputeDatasetHashTaskRequest,
     GenerateHistoryContentDownload,
@@ -63,6 +64,11 @@
 log = get_logger(__name__)
 
 
+@lru_cache()
+def setup_data_table_manager(app):
+    app._configure_tool_data_tables(from_shed_config=False)
+
+
 @lru_cache()
 def cached_create_tool_from_representation(app, raw_tool_source):
     return create_tool_from_representation(
@@ -381,6 +387,7 @@ def compute_dataset_hash(
 
 @galaxy_task(action="import a data bundle")
 def import_data_bundle(
+    app: MinimalManagerApp,
     hda_manager: HDAManager,
     ldda_manager: LDDAManager,
     tool_data_import_manager: ToolDataImportManager,
@@ -390,6 +397,7 @@ def import_data_bundle(
     id: Optional[int] = None,
     tool_data_file_path: Optional[str] = None,
 ):
+    setup_data_table_manager(app)
     if src == "uri":
         assert uri
         tool_data_import_manager.import_data_bundle_by_uri(config, uri, tool_data_file_path=tool_data_file_path)
@@ -401,6 +409,8 @@ def import_data_bundle(
         else:
             dataset = ldda_manager.by_id(id)
         tool_data_import_manager.import_data_bundle_by_dataset(config, dataset, tool_data_file_path=tool_data_file_path)
+    queue_worker = GalaxyQueueWorker(app)
+    queue_worker.send_control_task("reload_tool_data_tables")
 
 
 @galaxy_task(action="pruning history audit table")
diff --git a/test/integration/test_tool_data_delete.py b/test/integration/test_tool_data_delete.py
index 32f6430fc41a..6104aef2bfdf 100644
--- a/test/integration/test_tool_data_delete.py
+++ b/test/integration/test_tool_data_delete.py
@@ -45,6 +45,8 @@ def _testbase_fields(self):
         return show_response.json()["fields"]
 
     def _testbeta_field_count(self) -> int:
+        # We need to wait for the reload message to reach the control message consumer
+        time.sleep(1)
        return len(self._testbase_fields())
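
A minimal sketch (not Galaxy code; `FakeApp` and its counter are hypothetical stand-ins for the application object) of the pattern the new `setup_data_table_manager` helper appears to rely on: wrapping the setup call in `functools.lru_cache`, keyed on the app object, so that repeated `import_data_bundle` invocations in the same Celery worker process configure the tool data tables only once.

```python
from functools import lru_cache


class FakeApp:
    """Hypothetical stand-in for the Galaxy application object."""

    def __init__(self) -> None:
        self.configured = 0

    def _configure_tool_data_tables(self, from_shed_config: bool) -> None:
        # Pretend this is the expensive one-time configuration step.
        self.configured += 1


@lru_cache()
def setup_data_table_manager(app: FakeApp) -> None:
    # First call for a given app object does the work; later calls with the
    # same app object are served from the cache and do nothing.
    app._configure_tool_data_tables(from_shed_config=False)


if __name__ == "__main__":
    app = FakeApp()
    for _ in range(3):
        setup_data_table_manager(app)
    print(app.configured)  # -> 1: configuration ran exactly once per process
```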