diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..b1670a1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,26 @@ +BSD 2-Clause License + +Copyright (c) 2021, dbouget +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/README.md b/README.md index 5d41397..4a36897 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,33 @@ -# Raidionics-Slicer +
+

Raidionics plugin (3D Slicer)

+

Open plugin for AI-based segmentation and standardized reporting for neuro and mediastinal applications

+ +[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg)](https://opensource.org/licenses/BSD-2-Clause) +[![Paper](https://zenodo.org/badge/DOI/10.1038/s41598-023-42048-7.svg)](https://doi.org/10.1038/s41598-023-42048-7) +[![Paper](https://zenodo.org/badge/DOI/10.3389/fneur.2022.932219.svg)](https://www.frontiersin.org/articles/10.3389/fneur.2022.932219/full) + + Plugin for 3D Slicer to use the segmentation models and clinical reporting techniques (RADS) packaged in Raidionics. +A paper presenting the software and some benchmarks has been published in [Scientific Reports](https://doi.org/10.1038/s41598-023-42048-7). The plugin was first introduced in the article _"Brain tumor preoperative surgery imaging: models and software solutions for segmentation and standardized reporting"_, published in [Frontiers in Neurology](https://www.frontiersin.org/articles/10.3389/fneur.2022.932219/full).

+
-
- -## Installation - +## [Installation](https://github.com/raidionics/Raidionics-Slicer#installation) + +The plugin has been tested with the stable release 5.6.1 of 3D Slicer, and the upcoming release 5.7.0. +A [step-by-step video](https://www.youtube.com/watch?v=NStMzLcj1WE) for installing the plugin and running a segmentation model for the first time is available. -The plugin has been tested with the stable release 5.2.2 of 3D Slicer. +
+ +### [Step-by-step](https://github.com/raidionics/Raidionics-Slicer#step-by-step) + * Download 3D Slicer for your running operating system from [here](https://download.slicer.org/). * Download the plugin source code through either: @@ -25,14 +38,14 @@ The plugin has been tested with the stable release 5.2.2 of 3D Slicer. * Download and install Docker (see [below](https://github.com/raidionics/Raidionics-Slicer#docker-setup--)). The necessary _Raidionics_ Docker image should be collected automatically when downloading a model for the first time. Please do the following if it did not happen correctly: - > docker pull dbouget/raidionics-rads:v1.1 + > docker pull dbouget/raidionics-rads:v1.1-py38-cpu * Load the plugin into 3D Slicer: ∘ All Modules > Extension Wizard. ∘ Developer Tools > Extension Wizard. - ∘ Select Extension > point to the folder (second Raidionics) and add it to the path (tick the small box at the bottom). + ∘ Select Extension > point to the first Raidionics subfolder (inside Raidionics-Slicer) and add it to the path (tick the small box at the bottom). -* Restart 3D Slicer to setup Python environment. +* :warning: Restarting 3D Slicer to set up the Python environment might be necessary on some occasions. * Raidionics will appear in the list of modules inside the _Machine Learning_ category @@ -41,7 +54,8 @@ The plugin has been tested with the stable release 5.2.2 of 3D Slicer. 
-## How to cite +## [How to cite](https://github.com/raidionics/Raidionics-Slicer#how-to-cite) + If you are using Raidionics-Slicer in your research, please use the following citation: ``` @@ -61,27 +75,30 @@ If you are using Raidionics-Slicer in your research, please use the following ci
-## Methodological background +## [Methodological background](https://github.com/raidionics/Raidionics-Slicer#methodological-background) + More information about the different models provided and architectures used can be accessed from the below-listed publications. ### Neuro * AGU-Net neural network architecture => [Meningioma Segmentation in T1-Weighted MRI Leveraging Global Context and Attention Mechanisms](https://www.frontiersin.org/articles/10.3389/fradi.2021.711514/full) - * Standardized reporting and Data System => [Glioblastoma Surgery Imaging—Reporting and Data System: Standardized Reporting of Tumor Volume, Location, and Resectability Based on Automated Segmentations ](https://www.mdpi.com/2072-6694/13/12/2854) - -* Segmentation performance => [Glioblastoma Surgery Imaging–Reporting and Data System: Validation and Performance of the Automated Segmentation Task ](https://www.mdpi.com/2072-6694/13/18/4674) +* Preoperative GBM segmentation performance => [Glioblastoma Surgery Imaging–Reporting and Data System: Validation and Performance of the Automated Segmentation Task ](https://www.mdpi.com/2072-6694/13/18/4674) +* Postoperative GBM segmentation performance => [Segmentation of glioblastomas in early post-operative multi-modal MRI with deep neural networks](https://www.nature.com/articles/s41598-023-45456-x) +* Preoperative CNS segmentation performance => [Preoperative brain tumor imaging: models and software for segmentation and standardized reporting](https://www.frontiersin.org/journals/neurology/articles/10.3389/fneur.2022.932219/full) ### Mediastinum * Mediastinum organs segmentation => [Semantic segmentation and detection of mediastinal lymph nodes and anatomical structures in CT data for lung cancer staging](https://link.springer.com/article/10.1007/s11548-019-01948-8) * Lymph nodes segmentation => [Mediastinal lymph nodes segmentation using 3D convolutional neural network ensembles and anatomical priors 
guiding](https://www.tandfonline.com/doi/pdf/10.1080/21681163.2022.2043778) +* Airways segmentation => [AeroPath: An airway segmentation benchmark dataset with challenging pathology](https://arxiv.org/abs/2311.01138)
-## Docker setup +## [Docker setup](https://github.com/raidionics/Raidionics-Slicer#docker-setup) + A proper Docker setup is **mandatory** since all processing is performed within a Docker image. 3D Slicer is only used for its graphical user interface. @@ -107,7 +124,7 @@ All images will be automatically downloaded upon model selection, which might take some minutes while the 3D Slicer interface won't be responding. * The main Docker image can also be downloaded manually by: - > docker pull dbouget/raidionics-rads:v1.1 + > docker pull dbouget/raidionics-rads:v1.1-py38-cpu * When you execute for the first time, you might get a pop-up from Docker asking to allow the sharing of a `.raidonics-slicer/` directory, accept! diff --git a/Raidionics/Raidionics/src/RaidionicsLogic.py b/Raidionics/Raidionics/src/RaidionicsLogic.py index b9fa218..6052ae1 100644 --- a/Raidionics/Raidionics/src/RaidionicsLogic.py +++ b/Raidionics/Raidionics/src/RaidionicsLogic.py @@ -275,7 +275,7 @@ def executeDocker(self, dockerName, modelName, dataPath, iodict, inputs, outputs else: widgetPresent = False - dataPath = '/home/ubuntu/resources' + dataPath = '/workspace/resources' # Cleaning input/output folders for every run if os.path.exists(SharedResources.getInstance().data_path): @@ -383,14 +383,16 @@ def executeDocker(self, dockerName, modelName, dataPath, iodict, inputs, outputs cmd = list() cmd.append(self.dockerPath) - cmd.extend(('run', '-t', '-v')) + cmd.extend(('run', '-t')) # if self.use_gpu: # cmd.append(' --runtime=nvidia ') - #cmd.append(TMP_PATH + ':' + dataPath) + cmd.append('--user') + cmd.append(str(os.getuid())) + cmd.append('-v') cmd.append(SharedResources.getInstance().resources_path + ':' + dataPath) cmd.append(dockerName) cmd.append('-c') - cmd.append('/home/ubuntu/resources/data/rads_config.ini') + cmd.append('/workspace/resources/data/rads_config.ini') cmd.append('-v') cmd.append('debug') diff --git a/Raidionics/Raidionics/src/utils/backend_utilities.py 
b/Raidionics/Raidionics/src/utils/backend_utilities.py index 83dcc9a..455da53 100644 --- a/Raidionics/Raidionics/src/utils/backend_utilities.py +++ b/Raidionics/Raidionics/src/utils/backend_utilities.py @@ -31,13 +31,13 @@ def generate_backend_config(input_folder: str, parameters, logic_target_space: s rads_config.set('Default', 'caller', '') rads_config.add_section('System') rads_config.set('System', 'gpu_id', "-1") # Always running on CPU - rads_config.set('System', 'input_folder', '/home/ubuntu/resources/data') - rads_config.set('System', 'output_folder', '/home/ubuntu/resources/output') - rads_config.set('System', 'model_folder', '/home/ubuntu/resources/models') - rads_config.set('System', 'pipeline_filename', '/home/ubuntu/resources/models/' + model_name + '/pipeline.json') + rads_config.set('System', 'input_folder', '/workspace/resources/data') + rads_config.set('System', 'output_folder', '/workspace/resources/output') + rads_config.set('System', 'model_folder', '/workspace/resources/models') + rads_config.set('System', 'pipeline_filename', '/workspace/resources/models/' + model_name + '/pipeline.json') if logic_task == 'reporting': rads_config.set('System', 'pipeline_filename', - '/home/ubuntu/resources/reporting/' + parameters['UserConfiguration']['default']) + '/workspace/resources/reporting/' + parameters['UserConfiguration']['default']) rads_config.add_section('Runtime') rads_config.set('Runtime', 'reconstruction_method', SharedResources.getInstance().user_configuration['Predictions']['reconstruction_method']) diff --git a/Raidionics/Raidionics/src/utils/io_utilities.py b/Raidionics/Raidionics/src/utils/io_utilities.py index 0020434..bc55cc0 100644 --- a/Raidionics/Raidionics/src/utils/io_utilities.py +++ b/Raidionics/Raidionics/src/utils/io_utilities.py @@ -38,12 +38,21 @@ def get_available_cloud_models_list() -> List[List[str]]: Each model list element corresponds to the following headers: Item,link,dependencies,sum. 
""" cloud_models_list = [] - # cloud_models_list_url = 'https://drive.google.com/uc?id=1wVjqpQ7S3xTcNJyV2Sp_hSyKglcxfQLe' - cloud_models_list_url = 'https://drive.google.com/uc?id=1uibFBPBQywX7EGK5G_Oc6CXlDSiOePKF' + # cloud_models_list_url = 'https://drive.google.com/uc?id=1uibFBPBQywX7EGK5G_Oc6CXlDSiOePKF' + cloud_models_list_url = 'https://github.com/raidionics/Raidionics-models/releases/download/rsv1.1.1/Slicer_cloud_models_list.csv' try: cloud_models_list_filename = os.path.join(SharedResources.getInstance().json_cloud_dir, 'cloud_models_list.csv') - gdown.cached_download(url=cloud_models_list_url, path=cloud_models_list_filename) + headers = {} + + response = requests.get(cloud_models_list_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(cloud_models_list_filename, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) line_count = 0 + with open(cloud_models_list_filename) as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') for row in csv_reader: @@ -103,9 +112,16 @@ def download_cloud_model(selected_model): '-')) + '.zip') os.makedirs(os.path.join(SharedResources.getInstance().Raidionics_dir, '.cache'), exist_ok=True) - gdown.cached_download(url=model_config_url, - path=os.path.join(SharedResources.getInstance().json_local_dir, - '_'.join(selected_model[1:-1].split('][')) + '.json')) + headers = {} + + response = requests.get(model_config_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(os.path.join(SharedResources.getInstance().json_local_dir, + '_'.join(selected_model[1:-1].split('][')) + '.json'), "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) if not os.path.exists(archive_dl_dest) or hashlib.md5( open(archive_dl_dest, 'rb').read()).hexdigest() != model_checksum: @@ -190,10 +206,19 @@ def 
check_local_model_for_update(selected_model): def get_available_cloud_diagnoses_list(): cloud_diagnoses_list = [] - cloud_diagnoses_list_url = 'https://drive.google.com/uc?id=1lFlfUGxiHxykmf_2keLhXX6k2PG5jn6M' + # cloud_diagnoses_list_url = 'https://drive.google.com/uc?id=1lFlfUGxiHxykmf_2keLhXX6k2PG5jn6M' + cloud_diagnoses_list_url = 'https://github.com/raidionics/Raidionics-models/releases/download/rsv1.1.1/Slicer_cloud_pipelines_list.csv' try: cloud_diagnoses_list_filename = os.path.join(SharedResources.getInstance().json_cloud_dir, 'cloud_diagnoses_list.csv') - gdown.cached_download(url=cloud_diagnoses_list_url, path=cloud_diagnoses_list_filename) + headers = {} + response = requests.get(cloud_diagnoses_list_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(cloud_diagnoses_list_filename, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) + line_count = 0 with open(cloud_diagnoses_list_filename) as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') @@ -228,14 +253,30 @@ def check_local_diagnosis_for_update(selected_diagnosis): dl_dest = os.path.join(SharedResources.getInstance().Raidionics_dir, '.cache', str('_'.join(selected_diagnosis.split(']')[:-1]).replace('[', '').replace('/', '-')) + '.json') - gdown.cached_download(url=diagnosis_url, path=dl_dest, md5=diagnosis_md5sum) + + headers = {} + response = requests.get(diagnosis_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(dl_dest, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) + shutil.copy(src=dl_dest, dst=os.path.join(json_local_dir, os.path.basename(dl_dest))) diagnosis_dir = SharedResources.getInstance().diagnosis_path dl_dest = os.path.join(diagnosis_dir, str('_'.join(selected_diagnosis.split(']')[:-1]).replace('[', '').replace('/', '-')) + '.json') - 
gdown.cached_download(url=diagnosis_pipeline_url, path=dl_dest) + headers = {} + response = requests.get(diagnosis_pipeline_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(dl_dest, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) # Checking if dependencies must be updated. if len(diagnosis_dependencies) > 0: @@ -268,7 +309,15 @@ def download_cloud_diagnosis(selected_diagnosis): dl_dest = os.path.join(SharedResources.getInstance().Raidionics_dir, '.cache', str('_'.join(selected_diagnosis.split(']')[:-1]).replace('[', '').replace('/', '-')) + '.json') - gdown.cached_download(url=diagnosis_url, path=dl_dest, md5=diagnosis_checksum) + headers = {} + response = requests.get(diagnosis_url, headers=headers, stream=True) + response.raise_for_status() + + if response.status_code == requests.codes.ok: + with open(dl_dest, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) + shutil.copy(src=dl_dest, dst=os.path.join(json_local_dir, os.path.basename(dl_dest))) # Checking if dependencies are needed and if they exist already locally, otherwise triggers a download @@ -332,8 +381,13 @@ def download_cloud_model(self, selected_model): '-')) + '.zip') os.makedirs(os.path.join(SharedResources.getInstance().Raidionics_dir, '.cache'), exist_ok=True) - gdown.cached_download(url=model_config_url, - path=os.path.join(SharedResources.getInstance().json_local_dir, '_'.join(selected_model[1:-1].split('][')) + '.json')) + headers = {} + response = requests.get(model_config_url, headers=headers, stream=True) + response.raise_for_status() + if response.status_code == requests.codes.ok: + with open(os.path.join(SharedResources.getInstance().json_local_dir, '_'.join(selected_model[1:-1].split('][')) + '.json'), "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) if not os.path.exists(archive_dl_dest) or 
hashlib.md5(open(archive_dl_dest, 'rb').read()).hexdigest() != model_checksum: download_state = True @@ -395,7 +449,14 @@ def download_cloud_diagnosis2(self, selected_diagnosis): str('_'.join(selected_diagnosis.split(']')[:-1]).replace('[', '').replace('/', '-')) + '.json') - gdown.cached_download(url=diagnosis_url, path=dl_dest, md5=diagnosis_md5sum) + headers = {} + response = requests.get(diagnosis_url, headers=headers, stream=True) + response.raise_for_status() + if response.status_code == requests.codes.ok: + with open(dl_dest, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) + shutil.copy(src=dl_dest, dst=os.path.join(json_local_dir, os.path.basename(dl_dest))) diagnosis_dir = SharedResources.getInstance().diagnosis_path @@ -403,7 +464,13 @@ def download_cloud_diagnosis2(self, selected_diagnosis): str('_'.join(selected_diagnosis.split(']')[:-1]).replace('[', '').replace('/', '-')) + '_pipeline.json') - gdown.cached_download(url=diagnosis_pipeline_url, path=dl_dest) + headers = {} + response = requests.get(diagnosis_pipeline_url, headers=headers, stream=True) + response.raise_for_status() + if response.status_code == requests.codes.ok: + with open(dl_dest, "wb") as f: + for chunk in response.iter_content(chunk_size=1048576): + f.write(chunk) # Checking if dependencies are needed and if they exist already locally, otherwise triggers a download if len(diagnosis_dependencies) > 0: