diff --git a/app/routing/route_studies.py b/app/routing/route_studies.py index 49c47e38..2af2755a 100644 --- a/app/routing/route_studies.py +++ b/app/routing/route_studies.py @@ -13,8 +13,6 @@ from pathlib import Path from typing import Dict, Union -import pyfakefs - # App-specific includes import common.config as config import common.helper as helper @@ -36,7 +34,7 @@ def route_studies(pending_series: Dict[str, float]) -> None: # TODO: Handle studies that exceed the "force completion" timeout in the "CONDITION_RECEIVED_SERIES" mode studies_ready = {} with os.scandir(config.mercure.studies_folder) as it: - it = list(it) # type: ignore + it = list(it) # type: ignore for entry in it: if entry.is_dir() and not is_study_locked(entry.path): if is_study_complete(entry.path, pending_series): @@ -198,7 +196,7 @@ def check_force_study_timeout(folder: Path) -> bool: lock_file = Path(folder / mercure_names.LOCK) try: lock = helper.FileLock(lock_file) - except: + except Exception: logger.error(f"Unable to lock study for removal {lock_file}") # handle_error return False if not move_study_folder(task.id, folder.name, "DISCARD"): diff --git a/app/tests/__init__.py b/app/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/app/tests/conftest.py b/app/tests/conftest.py new file mode 100644 index 00000000..0ef3a9b7 --- /dev/null +++ b/app/tests/conftest.py @@ -0,0 +1,112 @@ + +import os +import socket +import uuid +from typing import Any, Callable, Dict + +import common # noqa: F401 +import common.config as config +import process # noqa: F401 +import pytest +import routing # noqa: F401 +from bookkeeping import bookkeeper +from common.types import Config + + +def spy_on(mocker, obj) -> None: + pieces = obj.split(".") + module = ".".join(pieces[0:-1]) + mocker.patch(obj, new=mocker.spy(eval(module), pieces[-1])) + + +def spies(mocker, list_of_spies) -> None: + for spy in list_of_spies: + spy_on(mocker, spy) + + +def attach_spies(mocker) -> None: + spies( + 
mocker, + [ + "routing.route_series.push_series_serieslevel", + "routing.route_series.push_serieslevel_outgoing", + "routing.route_studies.route_study", + "routing.generate_taskfile.create_series_task", + "routing.route_studies.move_study_folder", + "routing.route_studies.push_studylevel_error", + "routing.generate_taskfile.create_study_task", + "routing.router.route_series", + "routing.router.route_studies", + "process.processor.process_series", + # "process.process_series", + "common.monitor.post", + "common.monitor.send_event", + "common.monitor.send_register_series", + "common.monitor.send_register_task", + "common.monitor.send_task_event", + "common.monitor.async_send_task_event", + "common.monitor.send_processor_output", + "common.monitor.send_update_task", + "common.notification.trigger_notification_for_rule", + "common.notification.send_email", + "uuid.uuid1" + ], + ) + + +@pytest.fixture(scope="function") +def mocked(mocker): + mocker.resetall() + attach_spies(mocker) + return mocker + + +@pytest.fixture(scope="module") +def bookkeeper_port(): + return random_port() + + +@pytest.fixture(scope="module") +def receiver_port(): + return random_port() + + +@pytest.fixture(scope="function", autouse=True) +def mercure_config(fs, bookkeeper_port) -> Callable[[Dict], Config]: + # TODO: config from previous calls seems to leak in here + config_path = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/data/test_config.json") + + fs.add_real_file(config_path, target_path=config.configuration_filename, read_only=False) + for k in ["incoming", "studies", "outgoing", "success", "error", "discard", "processing", "jobs"]: + fs.create_dir(f"/var/{k}") + + def set_config(extra: Dict[Any, Any] = {}) -> Config: + config.read_config() + config.mercure = Config(**{**config.mercure.dict(), **extra}) # type: ignore + print(config.mercure.targets) + config.save_config() + return config.mercure + + # set_config() + # sqlite3 is not inside the fakefs so this is going to 
be a real file + set_config({"bookkeeper": "sqlite:///tmp/mercure_bookkeeper_" + str(uuid.uuid4()) + ".db"}) + + bookkeeper_env = f"""PORT={bookkeeper_port} +HOST=0.0.0.0 +DATABASE_URL={config.mercure.bookkeeper}""" + fs.create_file(bookkeeper.bk_config.config_filename, contents=bookkeeper_env) + + fs.add_real_directory(os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../alembic')) + fs.add_real_file(os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../alembic.ini'), read_only=True) + return set_config + + +def random_port() -> int: + """ + Generate a free port number to use as an ephemeral endpoint. + """ + s = socket.socket() + s.bind(('', 0)) # bind to any available port + port = s.getsockname()[1] # get the port number + s.close() + return int(port) diff --git a/app/tests/dispatch/test_restart_job.py b/app/tests/dispatch/test_restart_job.py index 15aaa1db..e0f29032 100644 --- a/app/tests/dispatch/test_restart_job.py +++ b/app/tests/dispatch/test_restart_job.py @@ -17,8 +17,7 @@ from process import processor from pytest_mock import MockerFixture from routing import router -from tests.testing_common import (FakeDockerContainer, bookkeeper_port, make_fake_processor, mercure_config, # noqa: F401 - mock_incoming_uid, mock_task_ids, mocked) +from tests.testing_common import FakeDockerContainer, make_fake_processor, mock_incoming_uid, mock_task_ids from webinterface.queue import RestartTaskErrors, restart_dispatch logger = config.get_logger() diff --git a/app/tests/dispatch/test_send.py b/app/tests/dispatch/test_send.py index ee7e45b0..263ec921 100644 --- a/app/tests/dispatch/test_send.py +++ b/app/tests/dispatch/test_send.py @@ -8,7 +8,6 @@ from common.constants import mercure_names from common.monitor import m_events, severity, task_event from dispatch.send import execute, is_ready_for_sending -from tests.testing_common import * dummy_info = { "action": "route", diff --git a/app/tests/integration/__init__.py 
b/app/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/app/tests/integration/common.py b/app/tests/integration/common.py new file mode 100644 index 00000000..38910da7 --- /dev/null +++ b/app/tests/integration/common.py @@ -0,0 +1,71 @@ +import subprocess +import tempfile +from dataclasses import dataclass + +import pydicom +import requests + + +def send_dicom(ds, dest_host, dest_port) -> None: + with tempfile.NamedTemporaryFile('w') as ds_temp: + ds.save_as(ds_temp.name) + subprocess.run(["dcmsend", dest_host, str(dest_port), ds_temp.name], check=True) + + +@dataclass +class MercureService: + name: str + command: str + numprocs: int = 1 + stopasgroup: bool = False + startsecs: int = 0 + + +def is_dicoms_received(mercure_base, dicoms) -> None: + dicoms_recieved = set() + for series_folder in (mercure_base / 'data' / 'incoming').glob('*/'): + for dicom in series_folder.glob('*.dcm'): + ds_ = pydicom.dcmread(dicom) + assert ds_.SeriesInstanceUID == series_folder.name + assert ds_.SOPInstanceUID not in dicoms_recieved + dicoms_recieved.add(ds_.SOPInstanceUID) + + assert dicoms_recieved == set(ds.SOPInstanceUID for ds in dicoms) + print(f"Received {len(dicoms)} dicoms as expected") + + +def is_dicoms_in_folder(folder, dicoms) -> None: + uids_found = set() + print(f"Looking for dicoms in {folder}") + dicoms_found = [] + for f in folder.rglob('*'): + if not f.is_file(): + continue + if f.suffix == '.dcm': + dicoms_found.append(f) + if f.suffix not in ('.error', '.tags'): + dicoms_found.append(f) + print("Dicoms", dicoms_found) + for dicom in dicoms_found: + + try: + uid = pydicom.dcmread(dicom).SOPInstanceUID + uids_found.add(uid) + except Exception: + pass + try: + assert uids_found == set(ds.SOPInstanceUID for ds in dicoms), f"Dicoms missing from {folder}" + except Exception: + print("Expected dicoms not found") + for dicom in folder.glob('**/*.dcm'): + print(dicom) + raise + print(f"Found {len(dicoms)} dicoms in {folder.name} as 
expected") + + +def is_series_registered(bookkeeper_port, dicoms) -> None: + result = requests.get(f"http://localhost:{bookkeeper_port}/query/series", + headers={"Authorization": "Token test"}) + assert result.status_code == 200 + result_json = result.json() + assert set([r['series_uid'] for r in result_json]) == set([d.SeriesInstanceUID for d in dicoms]) diff --git a/app/tests/testing_integration_common.py b/app/tests/integration/conftest.py similarity index 74% rename from app/tests/testing_integration_common.py rename to app/tests/integration/conftest.py index ad3389ea..76de1665 100644 --- a/app/tests/testing_integration_common.py +++ b/app/tests/integration/conftest.py @@ -1,34 +1,28 @@ import json import multiprocessing import os -import socket import subprocess import sys import tempfile import threading import time import xmlrpc.client -from dataclasses import dataclass from pathlib import Path from typing import Any, Callable, Generator, Optional -import pydicom import pytest -import requests from common.config import mercure_defaults from supervisor.options import ServerOptions from supervisor.states import RUNNING_STATES from supervisor.supervisord import Supervisor from supervisor.xmlrpc import SupervisorTransport -# current workding directory -here = os.path.abspath(os.getcwd()) +from app.tests.integration.common import MercureService -def send_dicom(ds, dest_host, dest_port) -> None: - with tempfile.NamedTemporaryFile('w') as ds_temp: - ds.save_as(ds_temp.name) - subprocess.run(["dcmsend", dest_host, str(dest_port), ds_temp.name], check=True) +# current working directory +def here() -> str: + return os.path.abspath(os.getcwd()) class SupervisorManager: @@ -151,65 +145,6 @@ def stop(self) -> None: pass -@dataclass -class MercureService: - name: str - command: str - numprocs: int = 1 - stopasgroup: bool = False - startsecs: int = 0 - - -def is_dicoms_received(mercure_base, dicoms) -> None: - dicoms_recieved = set() - for series_folder in (mercure_base / 
'data' / 'incoming').glob('*/'): - for dicom in series_folder.glob('*.dcm'): - ds_ = pydicom.dcmread(dicom) - assert ds_.SeriesInstanceUID == series_folder.name - assert ds_.SOPInstanceUID not in dicoms_recieved - dicoms_recieved.add(ds_.SOPInstanceUID) - - assert dicoms_recieved == set(ds.SOPInstanceUID for ds in dicoms) - print(f"Received {len(dicoms)} dicoms as expected") - - -def is_dicoms_in_folder(folder, dicoms) -> None: - uids_found = set() - print(f"Looking for dicoms in {folder}") - dicoms_found = [] - for f in folder.rglob('*'): - if not f.is_file(): - continue - if f.suffix == '.dcm': - dicoms_found.append(f) - if f.suffix not in ('.error', '.tags'): - dicoms_found.append(f) - print("Dicoms", dicoms_found) - for dicom in dicoms_found: - - try: - uid = pydicom.dcmread(dicom).SOPInstanceUID - uids_found.add(uid) - except Exception: - pass - try: - assert uids_found == set(ds.SOPInstanceUID for ds in dicoms), f"Dicoms missing from {folder}" - except Exception: - print("Expected dicoms not found") - for dicom in folder.glob('**/*.dcm'): - print(dicom) - raise - print(f"Found {len(dicoms)} dicoms in {folder.name} as expected") - - -def is_series_registered(bookkeeper_port, dicoms) -> None: - result = requests.get(f"http://localhost:{bookkeeper_port}/query/series", - headers={"Authorization": "Token test"}) - assert result.status_code == 200 - result_json = result.json() - assert set([r['series_uid'] for r in result_json]) == set([d.SeriesInstanceUID for d in dicoms]) - - @pytest.fixture(scope="function") def supervisord(mercure_base): supervisor: Optional[SupervisorManager] = None @@ -248,7 +183,7 @@ def python_bin(): if os.environ.get("CLEAN_VENV"): with tempfile.TemporaryDirectory(prefix="mercure_venv") as venvdir: subprocess.run([sys.executable, "-m", "venv", venvdir], check=True) - subprocess.run([os.path.join(venvdir, "bin", "pip"), "install", "-r", f"{here}/requirements.txt"], check=True) + subprocess.run([os.path.join(venvdir, "bin", "pip"), 
"install", "-r", f"{here()}/requirements.txt"], check=True) yield (venvdir + "/bin/python") else: yield sys.executable @@ -270,7 +205,7 @@ def mercure(supervisord: Callable[[Any], SupervisorManager], python_bin ) -> Generator[Callable[[Any], SupervisorManager], None, None]: def py_service(service, **kwargs) -> MercureService: if 'command' not in kwargs: - kwargs['command'] = f"{python_bin} {here}/app/{service}.py" + kwargs['command'] = f"{python_bin} {here()}/app/{service}.py" return MercureService(service, **kwargs) services = [ py_service("bookkeeper", startsecs=6), @@ -280,7 +215,7 @@ def py_service(service, **kwargs) -> MercureService: py_service("worker_fast", command=f"{python_bin} -m rq.cli worker mercure_fast"), py_service("worker_slow", command=f"{python_bin} -m rq.cli worker mercure_slow") ] - services += [MercureService("receiver", f"{here}/app/receiver.sh --inject-errors", stopasgroup=True)] + services += [MercureService("receiver", f"{here()}/app/receiver.sh --inject-errors", stopasgroup=True)] supervisor = supervisord(services) def do_start(services_to_start=["bookkeeper", "reciever", "router", "processor", "dispatcher"]) -> SupervisorManager: @@ -295,27 +230,6 @@ def do_start(services_to_start=["bookkeeper", "reciever", "router", "processor", print("=============") -def random_port() -> int: - """ - Generate a free port number to use as an ephemeral endpoint. 
- """ - s = socket.socket() - s.bind(('', 0)) # bind to any available port - port = s.getsockname()[1] # get the port number - s.close() - return int(port) - - -@pytest.fixture(scope="module") -def receiver_port(): - return random_port() - - -@pytest.fixture(scope="module") -def bookkeeper_port(): - return random_port() - - @pytest.fixture(scope="function") def mercure_config(mercure_base, receiver_port, bookkeeper_port): mercure_config = {k: v for k, v in mercure_defaults.items()} diff --git a/app/tests/test_integration.py b/app/tests/integration/test_integration.py similarity index 97% rename from app/tests/test_integration.py rename to app/tests/integration/test_integration.py index afdf79f7..b09ec7d6 100644 --- a/app/tests/test_integration.py +++ b/app/tests/integration/test_integration.py @@ -3,8 +3,7 @@ import pytest from common.types import FolderTarget, Module, Rule -from testing_integration_common import * -from testing_integration_common import is_dicoms_in_folder, is_dicoms_received, is_series_registered, send_dicom +from app.tests.integration.common import is_dicoms_in_folder, is_dicoms_received, is_series_registered, send_dicom from tests.testing_common import create_minimal_dicom diff --git a/app/tests/test_bookkeeper.py b/app/tests/test_bookkeeper.py index f395c847..0fc42345 100644 --- a/app/tests/test_bookkeeper.py +++ b/app/tests/test_bookkeeper.py @@ -7,7 +7,6 @@ import requests from bookkeeping import bookkeeper -from testing_common import bookkeeper_port, mercure_config # noqa: F401 # def run_server(app, port): # b.uvicorn.run(app, host="localhost", port=port) diff --git a/app/tests/test_cleaner.py b/app/tests/test_cleaner.py index 6b6ba87a..f802d392 100644 --- a/app/tests/test_cleaner.py +++ b/app/tests/test_cleaner.py @@ -9,7 +9,6 @@ import common.helper as helper from freezegun import freeze_time from pyfakefs.fake_filesystem import FakeFilesystem -from testing_common import * # noqa: F401 # helper func diff --git 
a/app/tests/test_notifications.py b/app/tests/test_notifications.py index e446b137..543eea47 100644 --- a/app/tests/test_notifications.py +++ b/app/tests/test_notifications.py @@ -17,17 +17,16 @@ from process import processor from pytest_mock import MockerFixture from routing import router -from testing_common import * -from testing_common import FakeDockerContainer, make_fake_processor, mock_incoming_uid + +from .testing_common import FakeDockerContainer, make_fake_processor, mock_incoming_uid logger = config.get_logger() processor_path = Path() -def make_config(action, trigger_reception, trigger_completion, - trigger_completion_on_request, - trigger_error, do_request, do_error) -> Dict[str, Dict]: +def make_config(action, trigger_reception, trigger_completion, trigger_completion_on_request, trigger_error, do_request, do_error + ) -> Dict[str, Dict]: if action in ("both", "route"): if do_error: target = "dummy_error" diff --git a/app/tests/test_processor.py b/app/tests/test_processor.py index 7690a819..80e693ab 100644 --- a/app/tests/test_processor.py +++ b/app/tests/test_processor.py @@ -28,8 +28,8 @@ from process import processor from pytest_mock import MockerFixture from routing import router -from testing_common import * -from testing_common import FakeDockerContainer, FakeImageContainer, make_fake_processor, mock_incoming_uid, mock_task_ids + +from .testing_common import FakeDockerContainer, FakeImageContainer, make_fake_processor, mock_incoming_uid, mock_task_ids logger = config.get_logger() diff --git a/app/tests/test_pydantic.py b/app/tests/test_pydantic.py index a4e8e4cb..af4ce1ff 100644 --- a/app/tests/test_pydantic.py +++ b/app/tests/test_pydantic.py @@ -2,7 +2,6 @@ test_bookkeeper.py ================== """ -from testing_common import * # noqa: F401 def test_config(mercure_config, mocker): diff --git a/app/tests/test_query.py b/app/tests/test_query.py index be84a80e..9de25deb 100644 --- a/app/tests/test_query.py +++ b/app/tests/test_query.py @@ -20,7 
+20,7 @@ from pynetdicom.sop_class import Verification # type: ignore from routing import router from rq import SimpleWorker -from testing_common import bookkeeper_port, mercure_config, receiver_port # noqa: F401 +# from testing_common import bookkeeper_port, mercure_config, receiver_port # noqa: F401 from webinterface.dashboards.query.jobs import GetAccessionTask, QueryPipeline from webinterface.dicom_client import SimpleDicomClient @@ -206,8 +206,8 @@ def test_query_job(dicom_server, tempdir, rq_connection, fs): """ fs.pause() try: - if (subprocess.run(['systemctl', 'is-active', "mercure_worker*"], capture_output=True, text=True, check=False, - ).stdout.strip() == 'active'): + if (subprocess.run(['systemctl', 'is-active', "mercure_worker*"], + capture_output=True, text=True, check=False).stdout.strip() == 'active'): raise Exception("At least one mercure worker is running, stop it before running test.") except subprocess.CalledProcessError: pass @@ -295,8 +295,8 @@ def test_query_dicomweb(dicomweb_server, tempdir, dummy_datasets, fs, rq_connect def test_query_operations(dicomweb_server, tempdir, dummy_datasets, fs, rq_connection): (tempdir / "outdir").mkdir() - task = QueryPipeline.create([ds.AccessionNumber for ds in dummy_datasets.values()], {}, - dicomweb_server, (tempdir / "outdir"), redis_server=rq_connection) + task = QueryPipeline.create([ds.AccessionNumber for ds in dummy_datasets.values()], + {}, dicomweb_server, (tempdir / "outdir"), redis_server=rq_connection) assert task assert task.meta['total'] == len(dummy_datasets) assert task.meta['completed'] == 0 @@ -330,8 +330,8 @@ def test_query_operations(dicomweb_server, tempdir, dummy_datasets, fs, rq_conne def test_query_retry(dicom_server_2: Tuple[DicomTarget, DummyDICOMServer], tempdir, dummy_datasets, fs, rq_connection): (tempdir / "outdir").mkdir() target, server = dicom_server_2 - task = QueryPipeline.create([ds.AccessionNumber for ds in dummy_datasets.values()], {}, - target, (tempdir / "outdir"), 
redis_server=rq_connection) + task = QueryPipeline.create([ds.AccessionNumber for ds in dummy_datasets.values()], + {}, target, (tempdir / "outdir"), redis_server=rq_connection) server.remaining_allowed_accessions = 1 # Only one accession is allowed to be retrieved w = SimpleWorker(["mercure_fast", "mercure_slow"], connection=rq_connection) diff --git a/app/tests/test_router.py b/app/tests/test_router.py index 109053dc..42bca6e2 100644 --- a/app/tests/test_router.py +++ b/app/tests/test_router.py @@ -17,8 +17,8 @@ from dispatch import dispatcher from pyfakefs.fake_filesystem import FakeFilesystem from routing import router -from testing_common import * -from testing_common import generate_uid, mock_incoming_uid, mock_task_ids, process_dicom + +from .testing_common import generate_uid, mock_incoming_uid, mock_task_ids, process_dicom rules = { "rules": { diff --git a/app/tests/test_studies.py b/app/tests/test_studies.py index 8d4d2304..037d1e4c 100644 --- a/app/tests/test_studies.py +++ b/app/tests/test_studies.py @@ -15,8 +15,8 @@ from process import processor from pyfakefs.fake_filesystem import FakeFilesystem from routing import router -from testing_common import * -from testing_common import mock_incoming_uid + +from .testing_common import mock_incoming_uid def create_series(mocked, fs, config, study_uid, series_uid, series_description, study_description="") -> Tuple[str, str]: @@ -361,4 +361,4 @@ def test_route_study_force_complete(fs: FakeFilesystem, mercure_config, mocked, assert list(discard_path.glob("**/*")) == [] elif force_complete_action == "discard": assert list(discard_path.glob("**/*")) != [] - assert list(out_path.glob("**/*")) == [] \ No newline at end of file + assert list(out_path.glob("**/*")) == [] diff --git a/app/tests/testing_common.py b/app/tests/testing_common.py index 2e1f4591..73fa8831 100644 --- a/app/tests/testing_common.py +++ b/app/tests/testing_common.py @@ -3,22 +3,16 @@ ================= """ import json -import os import shutil 
-import socket import uuid from pathlib import Path from typing import Any, Callable, Dict, Iterator, Optional, Tuple import common # noqa: F401 -import common.config as config import docker.errors import process # noqa: F401 import pydicom -import pytest import routing # noqa: F401 -from bookkeeping import bookkeeper -from common.types import Config from pydicom.dataset import Dataset, FileDataset, FileMetaDataset from pydicom.uid import generate_uid from tests.getdcmtags import process_dicom @@ -26,109 +20,27 @@ pydicom.config.settings.reading_validation_mode = pydicom.config.IGNORE pydicom.config.settings.writing_validation_mode = pydicom.config.IGNORE - -def spy_on(mocker, obj) -> None: - pieces = obj.split(".") - module = ".".join(pieces[0:-1]) - mocker.patch(obj, new=mocker.spy(eval(module), pieces[-1])) - - -def spies(mocker, list_of_spies) -> None: - for spy in list_of_spies: - spy_on(mocker, spy) - - -def attach_spies(mocker) -> None: - spies( - mocker, - [ - "routing.route_series.push_series_serieslevel", - "routing.route_series.push_serieslevel_outgoing", - "routing.route_studies.route_study", - "routing.generate_taskfile.create_series_task", - "routing.route_studies.move_study_folder", - "routing.route_studies.push_studylevel_error", - "routing.generate_taskfile.create_study_task", - "routing.router.route_series", - "routing.router.route_studies", - "process.processor.process_series", - # "process.process_series", - "common.monitor.post", - "common.monitor.send_event", - "common.monitor.send_register_series", - "common.monitor.send_register_task", - "common.monitor.send_task_event", - "common.monitor.async_send_task_event", - "common.monitor.send_processor_output", - "common.monitor.send_update_task", - "common.notification.trigger_notification_for_rule", - "common.notification.send_email", - "uuid.uuid1" - ], - ) - # mocker.patch("processor.process_series", new=mocker.spy(process.process_series, "process_series")) - - # spy_on(mocker, 
"routing.route_series.push_series_serieslevel") - # # mocker.patch( - # # "routing.route_series.push_series_serieslevel", new=mocker.spy(routing.route_series, "push_series_serieslevel") - # # ) - # mocker.patch( - # "routing.route_series.push_serieslevel_outgoing", - # new=mocker.spy(routing.route_series, "push_serieslevel_outgoing"), - # ) - # mocker.patch( - # "routing.generate_taskfile.create_series_task", new=mocker.spy(routing.generate_taskfile, "create_series_task") - # ) - - # mocker.patch("common.monitor.post", new=mocker.spy(common.monitor, "post")) - # mocker.patch("common.monitor.send_register_series", new=mocker.spy(common.monitor, "send_register_series")) - # mocker.patch("common.monitor.send_register_task", new=mocker.spy(common.monitor, "send_register_task")) - # mocker.patch("common.monitor.send_event", new=mocker.spy(common.monitor, "send_event")) - # mocker.patch("common.monitor.send_task_event", new=mocker.spy(common.monitor, "send_task_event")) - # mocker.patch("router.route_series", new=mocker.spy(router, "route_series")) - # mocker.patch("processor.process_series", new=mocker.spy(process.process_series, "process_series")) - - -@pytest.fixture(scope="function") -def mocked(mocker): - mocker.resetall() - attach_spies(mocker) - return mocker - - -@pytest.fixture(scope="module") -def bookkeeper_port(): - return random_port() - - -@pytest.fixture(scope="function", autouse=True) -def mercure_config(fs, bookkeeper_port) -> Callable[[Dict], Config]: - # TODO: config from previous calls seems to leak in here - config_path = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/data/test_config.json") - - fs.add_real_file(config_path, target_path=config.configuration_filename, read_only=False) - for k in ["incoming", "studies", "outgoing", "success", "error", "discard", "processing", "jobs"]: - fs.create_dir(f"/var/{k}") - - def set_config(extra: Dict[Any, Any] = {}) -> Config: - config.read_config() - config.mercure = 
Config(**{**config.mercure.dict(), **extra}) # type: ignore - print(config.mercure.targets) - config.save_config() - return config.mercure - - # set_config() - # sqlite3 is not inside the fakefs so this is going to be a real file - set_config({"bookkeeper": "sqlite:///tmp/mercure_bookkeeper_" + str(uuid.uuid4()) + ".db"}) - - bookkeeper_env = f"""PORT={bookkeeper_port} -HOST=0.0.0.0 -DATABASE_URL={config.mercure.bookkeeper}""" - fs.create_file(bookkeeper.bk_config.config_filename, contents=bookkeeper_env) - - fs.add_real_directory(os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../alembic')) - fs.add_real_file(os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/../alembic.ini'), read_only=True) - return set_config +# mocker.patch("processor.process_series", new=mocker.spy(process.process_series, "process_series")) + +# spy_on(mocker, "routing.route_series.push_series_serieslevel") +# # mocker.patch( +# # "routing.route_series.push_series_serieslevel", new=mocker.spy(routing.route_series, "push_series_serieslevel") +# # ) +# mocker.patch( +# "routing.route_series.push_serieslevel_outgoing", +# new=mocker.spy(routing.route_series, "push_serieslevel_outgoing"), +# ) +# mocker.patch( +# "routing.generate_taskfile.create_series_task", new=mocker.spy(routing.generate_taskfile, "create_series_task") +# ) + +# mocker.patch("common.monitor.post", new=mocker.spy(common.monitor, "post")) +# mocker.patch("common.monitor.send_register_series", new=mocker.spy(common.monitor, "send_register_series")) +# mocker.patch("common.monitor.send_register_task", new=mocker.spy(common.monitor, "send_register_task")) +# mocker.patch("common.monitor.send_event", new=mocker.spy(common.monitor, "send_event")) +# mocker.patch("common.monitor.send_task_event", new=mocker.spy(common.monitor, "send_task_event")) +# mocker.patch("router.route_series", new=mocker.spy(router, "route_series")) +# mocker.patch("processor.process_series", new=mocker.spy(process.process_series, 
"process_series")) def mock_task_ids(mocker, task_id, next_task_id) -> None: @@ -237,7 +149,7 @@ def mock_incoming_uid(config, fs, series_uid, tags={}, name="bar", force_tags_ou incoming = Path(config.incoming_folder) dcm_file = incoming / f"{name}.dcm" create_minimal_dicom(dcm_file, series_uid, tags) - dcm_file = process_dicom(str(dcm_file), "0.0.0.0", "mercure", "mercure") + dcm_file = process_dicom(str(dcm_file), "0.0.0.0", "mercure", "mercure") or Path() tags_f = str(dcm_file).replace('.dcm', '.tags') # print("@@@@@@@", dcm_file, tags_f) @@ -256,19 +168,3 @@ def mock_incoming_uid(config, fs, series_uid, tags={}, name="bar", force_tags_ou # ( incoming / "receiver_info").mkdir(exist_ok=True) # ( incoming / "receiver_info" / (series_uid+".received")).touch() return str(dcm_file), tags_f - - -def random_port() -> int: - """ - Generate a free port number to use as an ephemeral endpoint. - """ - s = socket.socket() - s.bind(('', 0)) # bind to any available port - port = s.getsockname()[1] # get the port number - s.close() - return int(port) - - -@pytest.fixture(scope="module") -def receiver_port(): - return random_port() diff --git a/app/webinterface/dashboards/query/jobs.py b/app/webinterface/dashboards/query/jobs.py index 91dbbc5d..525036ea 100644 --- a/app/webinterface/dashboards/query/jobs.py +++ b/app/webinterface/dashboards/query/jobs.py @@ -65,8 +65,8 @@ def invoke_getdcmtags(file: Path, node: Union[DicomTarget, DicomWebTarget], forc is_fake_fs = isinstance(Path, pyfakefs.fake_pathlib.FakePathlibPathModule) if is_fake_fs: # running a test - result = process_dicom(file, sender_address, sender_aet, receiver_aet, - set_tags=[("mercureForceRule", force_rule)]) # don't bother with bookkeeper + result = process_dicom(file, sender_address, sender_aet, receiver_aet, # don't bother with bookkeeper + set_tags=[("mercureForceRule", force_rule)] if force_rule else []) if result is None: raise Exception("Failed to get DICOM tags from the file.") else: