diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fa5dd33..93151d9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,7 +4,7 @@ name: CI-CD on: push: # push to any branch * - branches: [ "*"] + branches: [ "*" ] pull_request: branches: [ main , development] @@ -13,8 +13,10 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: + # os: [ubuntu-latest] + # python-version: ["3.12"] os: [ubuntu-latest, macos-latest, macos-14] - python-version: ["3.12", "3.11"] # "3.10", + python-version: ["3.12", "3.11"] steps: - uses: actions/checkout@v3 @@ -133,6 +135,12 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} + - name: Test Outputs of semantic release step + run: | + echo "${{ steps.release.outputs.released }}" + echo "${{ steps.release.outputs.version }}" + echo "${{ steps.release.outputs.tag }}" + - name: Install packaging-related tool run: python3 -m pip install build twine @@ -145,9 +153,8 @@ jobs: - name: Build package if: steps.release.outputs.released == 'true' run: | - poetry version $(git describe --tags --abbrev=0 | sed 's/^v//') + poetry version ${{ steps.release.outputs.version }} python -m build --sdist --wheel --outdir dist/ . - echo "LATEST_TAG=$(poetry version | sed 's/^nbiatoolkit //')" >> "$GITHUB_ENV" - name: Publish package distributions to PyPI if: steps.release.outputs.released == 'true' @@ -157,23 +164,6 @@ jobs: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} - - name: Print branch name - run: | - printf "LATEST TAG: %s\n" "${LATEST_TAG}" - echo "DOCKER_IMAGE_TAG=${LATEST_TAG}" >> "$GITHUB_ENV" - printf "DOCKER_IMAGE_TAG: %s\n" "${DOCKER_IMAGE_TAG}" - - - name: Print outputs from semantic release - run: | - # Semantic release gha provides the following outputs: - # - released: true if a new release was created, false otherwise - # - version: the new version number - # - tag: The Git tag that was created - echo "${{ steps.release.outputs.released }}" - echo "${{ steps.release.outputs.version }}" - echo "${{ steps.release.outputs.tag }}" - echo "${{ steps.release.outputs.release_notes }}" - - name: Set up QEMU if: steps.release.outputs.released == 'true' uses: docker/setup-qemu-action@v3 @@ -202,8 +192,8 @@ jobs: uses: docker/metadata-action@v3 with: images: | - ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ env.DOCKER_IMAGE_TAG }} - ghcr.io/${{ github.repository }}/nbiatoolkit:${{ env.DOCKER_IMAGE_TAG }} + ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ steps.release.outputs.tag }} + ghcr.io/${{ github.repository }}/nbiatoolkit:${{ steps.release.outputs.tag }} - name: Build if: steps.release.outputs.released == 'true' @@ -214,9 +204,9 @@ jobs: file: ./Dockerfile push: true tags: | - ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ env.DOCKER_IMAGE_TAG }} + ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ steps.release.outputs.tag }} ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:latest - ghcr.io/${{ github.repository }}/nbiatoolkit:${{ env.DOCKER_IMAGE_TAG }} + ghcr.io/${{ github.repository }}/nbiatoolkit:${{ steps.release.outputs.tag }} ghcr.io/${{ github.repository }}/nbiatoolkit:latest labels: ${{ steps.meta.outputs.labels }} @@ -226,7 +216,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, macos-14] - python-version: ["3.12", "3.11"] + python-version: ["3.12", "3.11", "3.10"] steps: - uses: actions/checkout@v3 @@ -251,15 +241,9 @@ jobs: - name: Test Image With new Tag run: | - # get latest tag - LATEST_TAG=$(curl -s \ - "https://api.github.com/repos/${{ github.repository 
}}/releases/latest" \ - | jq -r .tag_name | sed 's/^v//') - echo "LATEST_TAG=${LATEST_TAG}" - # test image with latest tag docker run --rm \ - ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${LATEST_TAG} \ + ${{ secrets.DOCKERHUB_USERNAME }}/nbiatoolkit:${{ steps.release.outputs.tag }} \ NBIAToolkit @@ -279,7 +263,7 @@ jobs: Update-README: needs: Continuous-Deployment runs-on: ubuntu-latest - # if: github.ref == 'refs/heads/development' + # if: jobs.Continuous-Deployment.outputs.released == 'true' steps: - name: Checkout code uses: actions/checkout@v3 diff --git a/.gitignore b/.gitignore index bd504df..d035a5a 100644 --- a/.gitignore +++ b/.gitignore @@ -157,3 +157,5 @@ src/nbiatoolkit/.DS_Store .vscode download.ipynb driver*.py +logs +logdir diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 21d0a30..a364407 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,11 @@ repos: - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 24.2.0 hooks: - id: black + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/README.md b/README.md index add6390..33d6e26 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ - ***Validate doownloads with MD5 checksums*** for downloaded images - **Auto-sort** DICOM files using a user-defined pattern of DICOM tags with specialized ***DICOMSorter class*** - +![SequenceDiagram](https://www.mermaidchart.com/raw/ce7f489f-bf58-4827-aedb-e379ed7bffd3?theme=dark&version=v0.1&format=svg) ## Installation @@ -85,10 +85,10 @@ For quick access to the NBIA, the toolkit also provides a command line interface / |/ / __ |/ // /| | / / / __ \/ __ \/ / //_/ / __/ / /| / /_/ // // ___ |/ / / /_/ / /_/ / / ,< / / /_ /_/ |_/_____/___/_/ |_/_/ \____/\____/_/_/|_/_/\__/ - + Version: 0.33.0 -Available CLI tools: +Available CLI tools: getCollections [-h] [-u USERNAME] [-pw PASSWORD] [-p PREFIX] [-o OUTPUTFILE] [--version] diff --git a/docs/conf.py b/docs/conf.py index 1c2314c..974d678 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,6 +15,11 @@ copyright = "2023, Jermiah Joseph" author = "Jermiah Joseph" +import nbiatoolkit + +# The full version, including alpha/beta/rc tags +release: str = nbiatoolkit.__version__ + # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be @@ -28,6 +33,7 @@ "sphinx.ext.viewcode", "sphinx_tabs.tabs", "sphinx_exec_code", + "sphinx.ext.autosectionlabel", ] autoapi_dirs = ["../src/nbiatoolkit"] diff --git a/docs/download_api/DownloadSeries.rst b/docs/download_api/DownloadSeries.rst new file mode 100644 index 0000000..3ecfd1b --- /dev/null +++ b/docs/download_api/DownloadSeries.rst @@ -0,0 +1,77 @@ +Download Series +^^^^^^^^^^^^^^ + + +The :meth:`downloadSeries` method is used to download an entire SeriesInstanceUID +from the NBIA Database. + +One of the features provided by the `nbiatoolkit` is the ability to configure +the folder structure of the downloaded files. This functionality is handled internally +by the `DICOMSorter` class. + +Configuration of the folder structure is done by passing a **`filePattern`** argument to the +:meth:`downloadSeries` method. The **`filePattern`** argument is a string constructed from +tags in the DICOM header. Tags are enclosed in `%` characters. For example, the following +**`filePattern`** string: + +.. 
code-block:: python + + filePattern = '%PatientID/%StudyInstanceUID/%SeriesInstanceUID/%InstanceNumber.dcm' + +will create a folder structure that looks like this: + +.. code-block:: bash + + PatientID + ├── StudyInstanceUID + │ └── SeriesInstanceUID + │ ├── 1.dcm + │ ├── 2.dcm + │ └── ... + ├── StudyInstanceUID + │ └── SeriesInstanceUID + │ ├── 1.dcm + │ ├── 2.dcm + │ └── ... + └── ... + +The **`filePattern`** string can be constructed from any DICOM tag. The following tags are +good candidates for constructing a **`filePattern`** string: + +- PatientID +- BodyPartExamined +- Modality +- StudyInstanceUID +- SeriesInstanceUID +- InstanceNumber +- SOPInstanceUID + + +To download a SeriesInstanceUID from the NBIA Database, use the :meth:`downloadSeries` method. + +.. automethod:: nbiatoolkit.NBIAClient.downloadSeries + + +.. tabs:: + .. tab:: Python + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + filePattern = '%PatientID/%StudyInstanceUID/%SeriesInstanceUID/%InstanceNumber.dcm' + downloadDir = './NBIA-Download' + nParallel = 5 + + with NBIAClient(return_type="dataframe") as client: + series = client.getSeries( + PatientID='TCGA-G2-A2EK' + ) + + client.downloadSeries( + series.SeriesInstanceUID, + downloadDir, + filePattern, + nParallel + ) diff --git a/docs/tutorial_files/1_InitializeClient.rst b/docs/getting_started/InitializeClient.rst similarity index 96% rename from docs/tutorial_files/1_InitializeClient.rst rename to docs/getting_started/InitializeClient.rst index c03630a..7114f0b 100644 --- a/docs/tutorial_files/1_InitializeClient.rst +++ b/docs/getting_started/InitializeClient.rst @@ -118,7 +118,8 @@ If you would like to return the data as a pandas DataFrame, you can pass the Feel free to open an issue on the GitHub repository if you would like to see this feature added. -Alternatively, you can set the return type for all methods by passing the `return_type` argument to the NBIAClient class. +Alternatively, you can set the return type for all methods by passing the `return_type` argument when +initializing the NBIAClient class. .. tabs:: @@ -141,7 +142,8 @@ Alternatively, you can set the return type for all methods by passing the `retur Logging ^^^^^^^ -The client can be initialized with a log level to control the verbosity of the logs. This is primarily intended for debugging and development purposes. +The client can be initialized with a log level to control the verbosity of the logs. This is primarily +intended for debugging and development purposes. The default log level is 'INFO' and the available log levels are `DEBUG`, `INFO`, `WARNING`, `ERROR`. .. tabs:: diff --git a/docs/getting_started/Installation.rst b/docs/getting_started/Installation.rst new file mode 100644 index 0000000..113c681 --- /dev/null +++ b/docs/getting_started/Installation.rst @@ -0,0 +1,40 @@ +Installation +____________ + +`nbiatoolkit` is currently under development and is not guaranteed to be stable. +Please refer to the `1.0.0 Stable Release Milestone `_ +for the roadmap to the first stable release. + +PyPi Installation +~~~~~~~~~~~~~~~~~ + +The easiest way to install `nbiatoolkit` is to use `pip` to install it from the +`Python Package Index (PyPi) `_. + +.. 
code-block:: console + + $ pip install nbiatoolkit + +***NOTE: It is recommended that you install the package in a conda or virtual environment.*** + +Conda Installation +~~~~~~~~~~~~~~~~~~ + +Though the package is not available on conda, you can create a conda environment and install the package using pip: + +.. code-block:: console + + $ conda create -n nbia python=3.12 + $ conda activate nbia + $ pip install nbiatoolkit + +Virtual Environment Installation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you prefer to use a virtual environment, you can create one and install the package using pip: + +.. code-block:: console + + $ python3 -m venv nbia + $ source nbia/bin/activate + $ pip install nbiatoolkit diff --git a/docs/markdowns/NBIA.md b/docs/getting_started/NBIA.md similarity index 100% rename from docs/markdowns/NBIA.md rename to docs/getting_started/NBIA.md diff --git a/docs/tutorial_files/logger.rst b/docs/getting_started/logger.rst similarity index 100% rename from docs/tutorial_files/logger.rst rename to docs/getting_started/logger.rst diff --git a/docs/index.md b/docs/index.md deleted file mode 100755 index d2075fd..0000000 --- a/docs/index.md +++ /dev/null @@ -1,17 +0,0 @@ -```{include} ../README.md -``` - -```{toctree} -:maxdepth: 2 -:hidden: - -markdowns/NBIA.md -markdowns/Installation.md -tutorial_files/1_InitializeClient.rst -tutorial_files/2_ExploreCollections.rst -tutorial_files/logger.rst -markdowns/CHANGELOG.md -markdowns/CONTRIBUTING.md -markdowns/CONDUCT.md -autoapi/index -``` diff --git a/docs/index.rst b/docs/index.rst new file mode 100755 index 0000000..9dcb2cb --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,43 @@ +.. include:: ../README.md + :parser: myst_parser.sphinx_ + +.. toctree:: + :caption: Getting started + :name: getting-started + :hidden: + :maxdepth: 2 + + getting_started/NBIA.md + getting_started/Installation.rst + getting_started/InitializeClient.rst + getting_started/logger.rst + +.. toctree:: + :caption: Querying API + :name: querying-api + :hidden: + :maxdepth: 2 + + querying_api/ExploringCollections.rst + querying_api/ExploringModalities.rst + querying_api/ExploringPatients.rst + querying_api/ExploringStudies.rst + querying_api/ExploringSeries.rst + +.. toctree:: + :caption: Downloading API + :name: downloading-api + :hidden: + :maxdepth: 2 + + download_api/DownloadSeries.rst + +.. toctree:: + :caption: Project Info + :name: project-info + :hidden: + :maxdepth: 2 + + project_info/CONTRIBUTING.md + project_info/CHANGELOG.md + project_info/CONDUCT.md diff --git a/docs/markdowns/Installation.md b/docs/markdowns/Installation.md deleted file mode 100644 index 159d3f4..0000000 --- a/docs/markdowns/Installation.md +++ /dev/null @@ -1,37 +0,0 @@ -# Installation - -`nbiatoolkit` is currently under development and is not guaranteed to be stable. -Please refer to the [1.0.0 Stable Release Milestone](https://github.com/jjjermiah/nbia-toolkit/milestone/1) for the roadmap -to the first stable release. 
- -## PyPi - -It is made available via PyPI and can be installed using pip: -```bash -pip install nbiatoolkit -``` - -***NOTE: It is recommended that you install the package in a conda or virtual environment.*** -## Conda - -Though the package is not available on conda, you can create a conda environment and install the package using pip: - -``` bash -conda create -n nbia python=3.12 -conda activate nbia -pip install nbiatoolkit -``` - -## Virtual Environment -If you do not have a virtual environment set up, you can create one using the following command: - -```bash -python -m venv /path/to/new/virtual/environment -``` - -Then activate the virtual environment and install the package using the commands: - -```bash -source /path/to/new/virtual/environment/bin/activate -pip install nbiatoolkit -``` diff --git a/docs/markdowns/CHANGELOG.md b/docs/project_info/CHANGELOG.md similarity index 97% rename from docs/markdowns/CHANGELOG.md rename to docs/project_info/CHANGELOG.md index 5410ed5..47b0888 100644 --- a/docs/markdowns/CHANGELOG.md +++ b/docs/project_info/CHANGELOG.md @@ -2,10 +2,55 @@ +## v0.34.0 (2024-03-16) + +### Build + +* build: try using release output version ([`2d7508a`](https://github.com/jjjermiah/nbia-toolkit/commit/2d7508a1fd1eb910460b62ffa992feda4792576f)) + +### Chore + +* chore: Update README: 0.33.0 ([`24b90c3`](https://github.com/jjjermiah/nbia-toolkit/commit/24b90c3dcb3ed8f11434414acd045dce5739fc3b)) + +* chore: Update README: 0.33.0 ([`adcfde5`](https://github.com/jjjermiah/nbia-toolkit/commit/adcfde53faefee8fb1938d42848112fb2f2f4579)) + +### Documentation + +* docs: improve documentation structure ([`1404c10`](https://github.com/jjjermiah/nbia-toolkit/commit/1404c1086e45e0adcc89acacb15ab0568ce2d406)) + +### Feature + +* feat: add counts to getModality and update docs for modality methods ([`54874a3`](https://github.com/jjjermiah/nbia-toolkit/commit/54874a316956245ecd4971e33bc043d9b00b23f7)) + +### Fix + +* fix: update changelog path ([`768124a`](https://github.com/jjjermiah/nbia-toolkit/commit/768124a9e06681e2de3dbb96d07ca2b654a78b03)) + +* fix: semver temp ([`5ff0550`](https://github.com/jjjermiah/nbia-toolkit/commit/5ff0550d848159cdbe7553940f5400ad29cca408)) + +* fix: semver temp ([`d378670`](https://github.com/jjjermiah/nbia-toolkit/commit/d378670f999b1e62056453392a05ae838a8449bb)) + +* fix: remove if context ([`6221885`](https://github.com/jjjermiah/nbia-toolkit/commit/6221885cbad716b0153667ea9186ef71bc3e4e02)) + +* fix: context variable ([`e7b630e`](https://github.com/jjjermiah/nbia-toolkit/commit/e7b630e37a66aac9413a58bf08623e692e933a42)) + +* fix: branch name formatting in CI/CD workflow ([`e38de75`](https://github.com/jjjermiah/nbia-toolkit/commit/e38de75a5c61c50efe409e7570f59ee2381df490)) + +### Refactor + +* refactor: Fix formatting issues and import statements ([`b5f5cb4`](https://github.com/jjjermiah/nbia-toolkit/commit/b5f5cb4526ae5ae04b3f25f3de8cf50486812b00)) + +* refactor: Refactor code formatting ([`f8b8ab8`](https://github.com/jjjermiah/nbia-toolkit/commit/f8b8ab82495d82748968f0f469494503499a6c3a)) + +* refactor: Add conv_response_list utility function ([`1aa0ad1`](https://github.com/jjjermiah/nbia-toolkit/commit/1aa0ad104ead472bd23f28c8c45280d4462dbac3)) + + ## v0.33.0 (2024-02-25) ### Chore +* chore(sem-ver): 0.33.0 ([`d0304cf`](https://github.com/jjjermiah/nbia-toolkit/commit/d0304cf7620f0dcce2d0749050fc3fdafd7433ce)) + * chore: Update README: 0.32.1 
([`d9ef88b`](https://github.com/jjjermiah/nbia-toolkit/commit/d9ef88b0ba93f1bcc3d72575189f0beb795a5964)) ### Documentation diff --git a/docs/markdowns/CONDUCT.md b/docs/project_info/CONDUCT.md similarity index 99% rename from docs/markdowns/CONDUCT.md rename to docs/project_info/CONDUCT.md index 7f2f520..e205209 100755 --- a/docs/markdowns/CONDUCT.md +++ b/docs/project_info/CONDUCT.md @@ -1,4 +1,4 @@ -# CODE OF CONDUCT +# Code of Conduct ## Our Pledge diff --git a/docs/markdowns/CONTRIBUTING.md b/docs/project_info/CONTRIBUTING.md similarity index 99% rename from docs/markdowns/CONTRIBUTING.md rename to docs/project_info/CONTRIBUTING.md index d54a9c6..de2ae19 100755 --- a/docs/markdowns/CONTRIBUTING.md +++ b/docs/project_info/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# CONTRIBUTING +# Contributing Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. diff --git a/docs/tutorial_files/2_ExploreCollections.rst b/docs/querying_api/ExploringCollections.rst similarity index 94% rename from docs/tutorial_files/2_ExploreCollections.rst rename to docs/querying_api/ExploringCollections.rst index 0d82c0e..a2f94df 100644 --- a/docs/tutorial_files/2_ExploreCollections.rst +++ b/docs/querying_api/ExploringCollections.rst @@ -1,7 +1,3 @@ -API Query Methods ----------------------- - - Collection Methods ^^^^^^^^^^^^^^^^^^ The simplest way to get a list of collections is to use the @@ -21,7 +17,10 @@ Passing a `prefix` parameter will return a list of collections that match the pr .. exec_code:: + # --- hide: start --- from nbiatoolkit import NBIAClient + from pprint import pprint as print + # --- hide: stop --- client = NBIAClient(return_type = "dataframe") collections_df = client.getCollections(prefix='TCGA') diff --git a/docs/querying_api/ExploringModalities.rst b/docs/querying_api/ExploringModalities.rst new file mode 100644 index 0000000..270eb86 --- /dev/null +++ b/docs/querying_api/ExploringModalities.rst @@ -0,0 +1,60 @@ +Modality Methods +^^^^^^^^^^^^^^^^ +The :meth:`getModalityValues` method can provide insight to the available modality types in the NBIA database. + +The method has the following signature: + +.. automethod:: nbiatoolkit.NBIAClient.getModalityValues + +Passing no parameters to the method will return a list of all modality types available in the NBIA database. +Filtering by :code:`Collection` and :code:`BodyPartExamined` is also possible. +The :code:`Counts` parameter can be set to :code:`True` to return the number of patients for each modality type. + +.. tabs:: + + .. tab:: Python + + .. tabs:: + + .. tab:: Default Query + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + modalities = client.getModalityValues() + + print(modalities) + + .. tab:: Filtered Query + + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + modalities = client.getModalityValues( + Collection = "TCGA-BLCA", + ) + + print(modalities) + + .. tab:: Counts Query + + .. 
exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + modalities = client.getModalityValues( + Collection = "TCGA-BLCA", + Counts = True + ) + + print(modalities) diff --git a/docs/querying_api/ExploringPatients.rst b/docs/querying_api/ExploringPatients.rst new file mode 100644 index 0000000..9b17473 --- /dev/null +++ b/docs/querying_api/ExploringPatients.rst @@ -0,0 +1,91 @@ +Patient Methods +^^^^^^^^^^^^^^^ + +The :meth:`getPatients` method can provide insight to the +patient metadata available in the NBIA database. + +.. automethod:: nbiatoolkit.NBIAClient.getPatients + + +By default, the :meth:`getPatients` method will return all +patients in the NBIA database. However, the method can be +filtered by `Collection`. + +.. tabs:: + + .. tab:: Python + + .. tabs:: + + .. tab:: Get all patients + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + patients = client.getPatients() + + print(patients.head()) + + .. tab:: Filter by Collection + + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + patients = client.getPatients(Collection = "TCGA-BLCA") + + print(patients.head()) + + +For more granular filtering, the :meth:`getPatientsByCollectionAndModality` method +can be used to filter by `Collection` **and** `Modality` as both are required. +Unlike the :meth:`getPatients` method which returns additional metadata such as +`SpeciesCode`, `SpeciesDescription`, `PatientSex`, and `EthnicGroup`, this method will +only return a list of Patient IDs. + +.. automethod:: nbiatoolkit.NBIAClient.getPatientsByCollectionAndModality + +.. tabs:: + + .. tab:: Python + + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + patients = client.getPatientsByCollectionAndModality(Collection = "TCGA-BLCA", Modality = "MR") + + print(patients.head()) + + +.. automethod:: nbiatoolkit.NBIAClient.getNewPatients + +The :meth:`getNewPatients` method can be used to retrieve a list of patients that +have been added to the NBIA database within a specified time frame. + +.. tabs:: + + .. tab:: Python + + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + patients = client.getNewPatients( + Collection="CMB-LCA", + Date="2022/12/06", + ) + + print(patients.head()) diff --git a/docs/querying_api/ExploringSeries.rst b/docs/querying_api/ExploringSeries.rst new file mode 100644 index 0000000..038c89a --- /dev/null +++ b/docs/querying_api/ExploringSeries.rst @@ -0,0 +1,53 @@ +Series Methods +^^^^^^^^^^^^^^ + +The :meth:`getSeries` method can provide insight to the available series +in the NBIA database. + +.. automethod:: nbiatoolkit.NBIAClient.getSeries + +By default, the method will return all the series in the database. However, +it can be filtered by the following parameters: + +- **Collection** +- **PatientID** +- **StudyInstanceUID** +- **Modality** +- **SeriesInstanceUID** +- **BodyPartExamined** +- **ManufacturerModelName** +- **Manufacturer** + +The following examples demonstrate using the :meth:`getSeries`. + +.. tabs:: + .. tab:: Python + .. tabs:: + .. tab:: Filter by Collection + .. 
exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + series = client.getSeries( + Collection = "TCGA-BLCA" + ) + + print(series.iloc[0]) + + .. tab:: Filter by Collection and PatientID + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + series = client.getSeries( + Collection = "TCGA-BLCA", + PatientID = "TCGA-G2-A2EK" + ) + + print(series.iloc[0]) diff --git a/docs/querying_api/ExploringStudies.rst b/docs/querying_api/ExploringStudies.rst new file mode 100644 index 0000000..e128003 --- /dev/null +++ b/docs/querying_api/ExploringStudies.rst @@ -0,0 +1,50 @@ +Studies Methods +^^^^^^^^^^^^^^^ + +The :meth:`getStudies` method can provide insight to the available studies in the +NBIA database. + + +.. automethod:: nbiatoolkit.NBIAClient.getStudies + +By default, the method requires filtering by **Collection**, but can optionally +be also filtered by **PatientID** and/or **StudyInstanceUID** as well. + + +The following example demonstrates how to use the :meth:`getStudies` method to filter the studies by the collection name. + +.. tabs:: + + .. tab:: Python + + .. tabs:: + + .. tab:: Get all studies + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + studies = client.getStudies( + Collection = "TCGA-BLCA" + ) + + print(studies.iloc[0]) + + .. tab:: Filter by Collection + + .. exec_code:: + + # --- hide: start --- + from nbiatoolkit import NBIAClient + # --- hide: stop --- + + with NBIAClient(return_type="dataframe") as client: + studies = client.getStudies( + Collection = "TCGA-BLCA", + PatientID = "TCGA-G2-A2EK" + ) + + print(studies.iloc[0]) diff --git a/poetry.lock b/poetry.lock index 5733dd9..43fa731 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "alabaster" @@ -3175,6 +3175,17 @@ sphinx = ">=6.1.0" [package.extras] docs = ["furo", "sphinx", "sphinx-design"] +[[package]] +name = "sphinx-exec-code" +version = "0.12" +description = "Execute code blocks in Sphinx and display the output" +optional = false +python-versions = "*" +files = [ + {file = "sphinx-exec-code-0.12.tar.gz", hash = "sha256:6b964eaa3d170d8a44e4db698b25e64c03a65681392f45a8f4a51c09f0ee70c9"}, + {file = "sphinx_exec_code-0.12-py3-none-any.whl", hash = "sha256:a4133e05147a2ef2d256bfb69872c3803aa566eacf88a28ae991f549527d3c72"}, +] + [[package]] name = "sphinx-rtd-theme" version = "1.3.0" @@ -3745,4 +3756,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10 || 3.12" -content-hash = "0ea99a83e5565b1b7124616e273cf78224fb1c8300b43a950b33eb0b1b98c547" +content-hash = "08a55ea078d191fe9148c91630c8792ddf3b032223713050ac6c06d5b342e40f" diff --git a/pyproject.toml b/pyproject.toml index e424929..46f6917 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nbiatoolkit" -version = "0.33.0" +version = "0.34.0" description = "A python package to query the National Biomedical Imaging Archive (NBIA) database." 
authors = ["Jermiah Joseph"] license = "MIT" @@ -16,7 +16,7 @@ getNewPatients = "nbiatoolkit.nbia_cli:getNewPatients_cli" getStudies = "nbiatoolkit.nbia_cli:getStudies_cli" getSeries = "nbiatoolkit.nbia_cli:getSeries_cli" getNewSeries = "nbiatoolkit.nbia_cli:getNewSeries_cli" -# downloadSingleSeries = "nbiatoolkit.nbia_cli:downloadSingleSeries_cli" + [tool.poetry.dependencies] python = ">=3.10 || 3.12" @@ -34,6 +34,7 @@ jupyter = "^1.0.0" pytest-cov = ">=4.0.0" pytest-xdist = ">=3.5.0" mkdocs = "1.5.3" +sphinx_exec_code = "0.12" myst-nb = {version = "^1.0.0", python = "^3.9"} sphinx-autoapi = "^3.0.0" sphinx-rtd-theme = "^1.3.0" @@ -51,7 +52,6 @@ version_variables = [ version_toml = [ "pyproject.toml:tool.poetry.version", ] -branch = "main" dist_path = "dist/" upload_to_release = true upload_to_pypi = false @@ -60,9 +60,12 @@ commit_message = "chore(sem-ver): {version}" patch_without_tag = true [tool.semantic_release.changelog] -changelog_file = "docs/markdowns/CHANGELOG.md" +changelog_file = "docs/project_info/CHANGELOG.md" exclude_commit_types = ["docs", "style", "refactor", "test", "chore"] +[tool.semantic_release.branches.main] +match = "(main|master)" + [tool.semantic_release.commit_parser_options] allowed_tags = [ diff --git a/src/nbiatoolkit/__init__.py b/src/nbiatoolkit/__init__.py index 0662694..80805ca 100644 --- a/src/nbiatoolkit/__init__.py +++ b/src/nbiatoolkit/__init__.py @@ -1,4 +1,4 @@ -## __init__.py +# __init__.py # Path: projects/nbia-toolkit/src/__init__.py # this is the __init__.py file diff --git a/src/nbiatoolkit/dicomsort/dicomsort.py b/src/nbiatoolkit/dicomsort/dicomsort.py index 8b7225e..3bbc294 100644 --- a/src/nbiatoolkit/dicomsort/dicomsort.py +++ b/src/nbiatoolkit/dicomsort/dicomsort.py @@ -6,6 +6,7 @@ # from typing import Optional # from tqdm import tqdm +from tqdm import tqdm import pydicom from pydicom.errors import InvalidDicomError import os @@ -114,9 +115,6 @@ def sortSingleDICOMFile( return True -from tqdm import tqdm - - class DICOMSorter: def __init__( self, diff --git a/src/nbiatoolkit/nbia.py b/src/nbiatoolkit/nbia.py index 1275a0a..21160ad 100644 --- a/src/nbiatoolkit/nbia.py +++ b/src/nbiatoolkit/nbia.py @@ -1,6 +1,7 @@ from calendar import c from inspect import getmodule from re import I +import re import zipfile from tempfile import TemporaryDirectory from .dicomsort import DICOMSorter @@ -18,6 +19,7 @@ convertDateFormat, parse_response, ReturnType, + conv_response_list, ) import pandas as pd import requests @@ -29,30 +31,7 @@ from datetime import datetime # set __version__ variable -__version__ = "0.33.0" - - -# function that takes a list of dictionaries and returns either a list or a dataframe -def conv_response_list( - response_json: List[dict[Any, Any]], - return_type: ReturnType, -) -> List[dict[Any, Any]] | pd.DataFrame: - """_summary_ - - :param response_json: _description_ - :type response_json: List[dict[Any, Any]] - :param return_type: _description_ - :type return_type: ReturnType - :return: _description_ - :rtype: List[dict[Any, Any]] | pd.DataFrame - """ - - assert isinstance(response_json, list), "The response JSON must be a list" - - if return_type == ReturnType.LIST: - return response_json - elif return_type == ReturnType.DATAFRAME: - return pd.DataFrame(data=response_json) +__version__ = "0.34.0" def downloadSingleSeries( @@ -63,6 +42,7 @@ def downloadSingleSeries( api_headers: dict[str, str], base_url: NBIA_BASE_URLS, log: Logger, + Progressbar: bool = False, ): """ Downloads a single series from the NBIA server. 
@@ -75,6 +55,7 @@ def downloadSingleSeries( api_headers (dict[str, str]): The headers to be included in the API request. base_url (NBIA_ENDPOINTS): The base URL of the NBIA server. log (Logger): The logger object for logging messages. + Progressbar (bool, optional): Flag indicating whether to display a progress bar. Defaults to False. Returns: bool: True if the series is downloaded and sorted successfully, False otherwise. @@ -110,7 +91,10 @@ def downloadSingleSeries( ) # sorter.sortDICOMFiles(option="move", overwrite=overwrite) if not sorter.sortDICOMFiles( - shutil_option="move", overwrite=overwrite, progressbar=False, n_parallel=1 + shutil_option="move", + overwrite=overwrite, + progressbar=Progressbar, + n_parallel=1, ): log.error( f"Error sorting DICOM files for series {SeriesInstanceUID}\n \ @@ -366,17 +350,43 @@ def getModalityValues( self, Collection: str = "", BodyPartExamined: str = "", + Counts: bool = False, return_type: Optional[Union[ReturnType, str]] = None, ) -> List[dict[Any, Any]] | pd.DataFrame: + """Retrieves possible modality values from the NBIA database. + + Args: + Collection (str, optional): Collection name to filter by. Defaults to "". + BodyPartExamined (str, optional): BodyPart name to filter by. Defaults to "". + Counts (bool, optional): Flag to indicate whether to return patient counts. Defaults to False. + return_type (Optional[Union[ReturnType, str]], optional): + Return type of the response. Defaults to None which uses the default return type. + + Returns: + List[dict[Any, Any]] | pd.DataFrame: + List of modality values or DataFrame containing the modality values. + """ + returnType: ReturnType = self._get_return(return_type) PARAMS: dict = self.parsePARAMS(params=locals()) - response: List[dict[Any, Any]] - response = self.query_api( - endpoint=NBIA_ENDPOINTS.GET_MODALITY_VALUES, params=PARAMS + endpoint = ( + NBIA_ENDPOINTS.GET_MODALITY_PATIENT_COUNT + if Counts + else NBIA_ENDPOINTS.GET_MODALITY_VALUES ) + response: List[dict[Any, Any]] + response = self.query_api(endpoint=endpoint, params=PARAMS) + + if Counts: + for modality in response: + modality["Modality"] = modality["criteria"] + modality["PatientCount"] = modality["count"] + del modality["criteria"] + del modality["count"] + return conv_response_list(response, returnType) def getPatients( @@ -384,6 +394,17 @@ def getPatients( Collection: str = "", return_type: Optional[Union[ReturnType, str]] = None, ) -> List[dict[Any, Any]] | pd.DataFrame: + """ + Retrieves a list of patients from the NBIA API. + + Args: + Collection (str, optional): The name of the collection to filter the patients. Defaults to "". + return_type (Optional[Union[ReturnType, str]], optional): The desired return type. Defaults to None. + + Returns: + List[dict[Any, Any]] | pd.DataFrame: A list of patient dictionaries or a pandas DataFrame, depending on the return type. + + """ returnType: ReturnType = self._get_return(return_type) PARAMS: dict = self.parsePARAMS(locals()) @@ -399,6 +420,21 @@ def getNewPatients( Date: Union[str, datetime], return_type: Optional[Union[ReturnType, str]] = None, ) -> List[dict[Any, Any]] | pd.DataFrame: + """ + Retrieves new patients from the NBIA API based on the specified collection and date. + + Args: + Collection (str): The name of the collection to retrieve new patients from. + Date (Union[str, datetime]): The date to filter the new patients. Can be a string in the format "YYYY/MM/DD" or a datetime object. + return_type (Optional[Union[ReturnType, str]]): The desired return type. 
Defaults to None. + + Returns: + List[dict[Any, Any]] | pd.DataFrame: A list of dictionaries or a pandas DataFrame containing the new patients. + + Raises: + AssertionError: If the Date argument is None. + + """ returnType: ReturnType = self._get_return(return_type) assert Date is not None @@ -421,6 +457,20 @@ def getPatientsByCollectionAndModality( Modality: str, return_type: Optional[Union[ReturnType, str]] = None, ) -> List[dict[Any, Any]] | pd.DataFrame: + """ + Retrieves patients by collection and modality. + + Args: + Collection (str): The collection name. + Modality (str): The modality name. + return_type (Optional[Union[ReturnType, str]], optional): The desired return type. Defaults to None. + + Returns: + List[dict[Any, Any]] | pd.DataFrame: The list of patients or a pandas DataFrame, depending on the return type. + + Raises: + AssertionError: If Collection or Modality is None. + """ assert Collection is not None assert Modality is not None @@ -460,6 +510,18 @@ def getStudies( StudyInstanceUID: str = "", return_type: Optional[Union[ReturnType, str]] = None, ) -> List[dict[Any, Any]] | pd.DataFrame: + """ + Retrieves studies from the NBIA API based on the specified parameters. + + Args: + Collection (str): The name of the collection to retrieve studies from. + PatientID (str, optional): The patient ID to filter the studies by. Defaults to "". + StudyInstanceUID (str, optional): The study instance UID to filter the studies by. Defaults to "". + return_type (Optional[Union[ReturnType, str]], optional): The desired return type. Defaults to None. + + Returns: + List[dict[Any, Any]] | pd.DataFrame: A list of dictionaries or a pandas DataFrame containing the retrieved studies. + """ returnType: ReturnType = self._get_return(return_type) PARAMS: dict = self.parsePARAMS(locals()) @@ -559,6 +621,7 @@ def downloadSeries( filePattern: str = "%PatientName/%Modality-%SeriesNumber-%SeriesInstanceUID/%InstanceNumber.dcm", overwrite: bool = False, nParallel: int = 1, + Progressbar: bool = False, ) -> bool: if isinstance(SeriesInstanceUID, str): SeriesInstanceUID = [SeriesInstanceUID] @@ -570,8 +633,8 @@ def downloadSeries( results = [] for series in SeriesInstanceUID: result = pool.apply_async( - downloadSingleSeries, - ( + func=downloadSingleSeries, + args=( series, downloadDir, filePattern, @@ -579,6 +642,7 @@ def downloadSeries( self._api_headers, self._base_url, self._log, + Progressbar, ), ) results.append(result) diff --git a/src/nbiatoolkit/nbia_cli.py b/src/nbiatoolkit/nbia_cli.py index b289d42..92091e4 100644 --- a/src/nbiatoolkit/nbia_cli.py +++ b/src/nbiatoolkit/nbia_cli.py @@ -3,6 +3,7 @@ from .dicomsort import DICOMSorter import argparse +from argparse import ArgumentParser import sys import threading import sys @@ -68,7 +69,8 @@ def _initialize_parser(description: str) -> argparse.ArgumentParser: "--username", action="store", type=str, - default="nbia_guest", # help="Username for the NBIA API (default: nbia_guest)" + # help="Username for the NBIA API (default: nbia_guest)" + default="nbia_guest", ) credentials.add_argument( @@ -214,7 +216,7 @@ def cli_wrapper(func, **kwargs) -> List[str] | None: def getPatients_cli() -> None: global query query = "patients" - p = _initialize_parser(description=f"NBIAToolkit: {query} ") + p: ArgumentParser = _initialize_parser(description=f"NBIAToolkit: {query} ") p.add_argument( "-c", @@ -235,7 +237,7 @@ def getPatients_cli() -> None: def getCollections_cli() -> None: global query query = "collections" - p = 
_initialize_parser(description=f"NBIAToolkit: {query} ") + p: ArgumentParser = _initialize_parser(description=f"NBIAToolkit: {query} ") p.add_argument( "-p", @@ -313,7 +315,7 @@ def getBodyPartCounts_cli() -> None: def getStudies_cli() -> None: global query query = f"getStudies" - p = _initialize_parser( + p: ArgumentParser = _initialize_parser( description=f"NBIAToolkit: {query}. Get studies from a collection." ) @@ -356,7 +358,7 @@ def getSeries_cli() -> None: global output query = f"series" - p = _initialize_parser(description=f"NBIAToolkit: {query} ") + p: ArgumentParser = _initialize_parser(description=f"NBIAToolkit: {query} ") p.add_argument( "-c", @@ -443,8 +445,9 @@ def getSeries_cli() -> None: def getNewSeries_cli() -> None: global query query = f"newSeries" - p = _initialize_parser( - description=f"NBIAToolkit: {query}. Get new series from a collection since a given date." + p: ArgumentParser = _initialize_parser( + description=f"NBIAToolkit: {query}. \ + Get new series from a collection since a given date." ) p.add_argument( @@ -468,7 +471,9 @@ def downloadSingleSeries_cli() -> None: query = f"series" # use the NBIAClient._downloadSingleSeries function to download a single series - p = _initialize_parser(description="NBIAToolkit: download a single series") + p: ArgumentParser = _initialize_parser( + description="NBIAToolkit: download a single series" + ) p.add_argument( "--seriesUID", @@ -522,7 +527,7 @@ def downloadSingleSeries_cli() -> None: def DICOMSorter_cli(): - parser = _initialize_parser( + parser: ArgumentParser = _initialize_parser( description="NBIAToolkit: Sort DICOM files into destination directory according to target pattern." ) diff --git a/src/nbiatoolkit/utils/__init__.py b/src/nbiatoolkit/utils/__init__.py index 676ecba..0a868ac 100644 --- a/src/nbiatoolkit/utils/__init__.py +++ b/src/nbiatoolkit/utils/__init__.py @@ -1,13 +1,14 @@ -from .nbia_endpoints import NBIA_ENDPOINTS, NBIA_BASE_URLS +from .nbia_endpoints import NBIA_ENDPOINTS, NBIA_BASE_URLS, ReturnType from .md5 import validateMD5 from .parsers import ( convertMillis, clean_html, convertDateFormat, parse_response, - ReturnType, ) +from .conv_response_list import conv_response_list + __all__ = [ "NBIA_ENDPOINTS", "NBIA_BASE_URLS", @@ -15,6 +16,7 @@ "convertMillis", "clean_html", "convertDateFormat", + "conv_response_list", "parse_response", "ReturnType", ] diff --git a/src/nbiatoolkit/utils/conv_response_list.py b/src/nbiatoolkit/utils/conv_response_list.py new file mode 100644 index 0000000..a123133 --- /dev/null +++ b/src/nbiatoolkit/utils/conv_response_list.py @@ -0,0 +1,26 @@ +from typing import Any, List +from .nbia_endpoints import ReturnType +import pandas as pd + + +# function that takes a list of dictionaries and returns either a list or a dataframe +def conv_response_list( + response_json: List[dict[Any, Any]], + return_type: ReturnType, +) -> List[dict[Any, Any]] | pd.DataFrame: + """Function that takes in a list of dictionaries and returns either a list or a dataframe. + + :param response_json: A response from the API in the form of a list of dictionaries. + :type response_json: List[dict[Any, Any]] + :param return_type: The desired return type for the response. + :type return_type: ReturnType + :return: Either a list of dictionaries or a DataFrame. 
+ :rtype: List[dict[Any, Any]] | pd.DataFrame + """ + + assert isinstance(response_json, list), "The response JSON must be a list" + + if return_type == ReturnType.LIST: + return response_json + elif return_type == ReturnType.DATAFRAME: + return pd.DataFrame(data=response_json) diff --git a/src/nbiatoolkit/utils/nbia_endpoints.py b/src/nbiatoolkit/utils/nbia_endpoints.py index b65f06e..45296b2 100644 --- a/src/nbiatoolkit/utils/nbia_endpoints.py +++ b/src/nbiatoolkit/utils/nbia_endpoints.py @@ -28,6 +28,7 @@ class NBIA_ENDPOINTS(Enum): GET_COLLECTION_DESCRIPTIONS = "getCollectionDescriptions" GET_MODALITY_VALUES = "v2/getModalityValues" + GET_MODALITY_PATIENT_COUNT = "getModalityValuesAndCounts" GET_PATIENTS = "v2/getPatient" GET_NEW_PATIENTS_IN_COLLECTION = "v2/NewPatientsInCollection" @@ -42,15 +43,16 @@ class NBIA_ENDPOINTS(Enum): DOWNLOAD_SERIES = "v2/getImageWithMD5Hash" GET_DICOM_TAGS = "getDicomTags" - # curl -H "Authorization:Bearer YOUR_ACCESS_TOKEN" -k "https://services.cancerimagingarchive.net/nbia-api/services/v2/getSeriesMetaData" - # curl -H "Authorization:Bearer YOUR_ACCESS_TOKEN" -k "https://services.cancerimagingarchive.net/nbia-api/services/v2/getSeriesSize" - - # curl -H "Authorization:Bearer YOUR_ACCESS_TOKEN" -k "https://services.cancerimagingarchive.net/nbia-api/services/v2/getUpdatedSeries" - - # curl -H "Authorization:Bearer YOUR_ACCESS_TOKEN" -k "https://services.cancerimagingarchive.net/nbia-api/services/getSeriesMetadata2" -d "list=1.3.6.1.4.1.14519.5.2.1.6834.5010.322628904903035357840500590726" - # Helper functions def __str__(self): return self.value def _format(self): return self.value.split("/")[-1] + + +# so that users can decide between a List or a pd.DataFrame +class ReturnType(Enum): + LIST = "list" + DATAFRAME = "dataframe" + + # change .value so that DATAFRAME returns "pd.DataFrame" diff --git a/src/nbiatoolkit/utils/parsers.py b/src/nbiatoolkit/utils/parsers.py index 556a46a..c59a45e 100644 --- a/src/nbiatoolkit/utils/parsers.py +++ b/src/nbiatoolkit/utils/parsers.py @@ -4,15 +4,6 @@ from typing import Union, Any, Dict, List, Literal, Optional, Tuple import pandas as pd import requests -from enum import Enum - - -# so that users can decide between a List or a pd.DataFrame -class ReturnType(Enum): - LIST = "list" - DATAFRAME = "dataframe" - - # change .value so that DATAFRAME returns "pd.DataFrame" def clean_html(html_string: str) -> str: @@ -99,7 +90,6 @@ def parse_response(response: requests.Response) -> List[dict[Any, Any]]: assert ( response.status_code == 200 ), "The response status code must be 200 OK but is {}".format(response.status_code) - # TODO:: describe error 204 if not "application/json" in content_type: if response.content == b"": diff --git a/tests/test_ModalityQueries.py b/tests/test_ModalityQueries.py new file mode 100644 index 0000000..00637e9 --- /dev/null +++ b/tests/test_ModalityQueries.py @@ -0,0 +1,30 @@ +import pytest + +from src.nbiatoolkit import NBIAClient +from src.nbiatoolkit.utils import * +import pandas as pd + + +@pytest.fixture(scope="session") +def nbia_client(): + return NBIAClient() + + +def test_getModalityValues(nbia_client: NBIAClient): + modality_values = nbia_client.getModalityValues() + + assert isinstance(modality_values, list) + assert len(modality_values) > 0 + assert isinstance(modality_values[0], dict) + assert len(modality_values[0].keys()) == 1 + + assert isinstance(modality_values[0]["Modality"], str) + + +def test_getModalityValuesCounts(nbia_client: NBIAClient): + modality_values_counts = 
nbia_client.getModalityValues(Counts=True) + + assert isinstance(modality_values_counts, list) + assert len(modality_values_counts) > 0 + assert isinstance(modality_values_counts[0], dict) + assert len(modality_values_counts[0].keys()) == 2 diff --git a/tests/test_dicom_helpers.py b/tests/test_dicom_helpers.py index 3fcfa46..64b3bc5 100644 --- a/tests/test_dicom_helpers.py +++ b/tests/test_dicom_helpers.py @@ -83,9 +83,9 @@ def test_sanitizeFileName_empty_string(): def test_sanitizeFileName_assertions(): with pytest.raises(AssertionError): - sanitizeFileName(None) + sanitizeFileName(None) # type: ignore with pytest.raises(AssertionError): - sanitizeFileName(123) + sanitizeFileName(123) # type: ignore ############################################################################### @@ -113,10 +113,10 @@ def test__truncateUID_with_lastDigits_greater_than_length_of_UID(uid): def test__truncateUID_with_invalid_input_types(uid): lastDigits = "5" with pytest.raises(AssertionError): - _truncateUID(uid, lastDigits) + _truncateUID(uid, lastDigits) # type: ignore def test__truncateUID_with_None_input(uid): lastDigits = None with pytest.raises(AssertionError): - _truncateUID(uid, lastDigits) + _truncateUID(uid, lastDigits) # type: ignore
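
For reference, the `Counts` option introduced in this change can be exercised end to end. The following is a minimal sketch mirroring the example added in `docs/querying_api/ExploringModalities.rst`, assuming `nbiatoolkit` (>= 0.34.0) is installed and the public NBIA service is reachable with the default guest credentials.

```python
# Minimal sketch of the new Counts option on getModalityValues (hedged example,
# not part of the patch above). Assumes nbiatoolkit >= 0.34.0 and network access.
from nbiatoolkit import NBIAClient

with NBIAClient(return_type="dataframe") as client:
    # With Counts=True the client queries the getModalityValuesAndCounts endpoint
    # and renames the returned "criteria"/"count" keys to "Modality"/"PatientCount".
    modalities = client.getModalityValues(Collection="TCGA-BLCA", Counts=True)

print(modalities)
```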