Skip to content

Commit

Permalink
update maisi (#649)
Browse files Browse the repository at this point in the history
Fixes # .

### Description
Update the MAISI bundle: move dataset unzipping out of the CI premerge script into the `unzip_dataset` utility (rank-0-only extraction with a distributed barrier), make `create_workflow` an optional import in `ci/verify_bundle.py` and skip the property check when `supported_apps` is absent, replace Google Drive large-file links with NVIDIA-hosted URLs plus MD5 hashes, and bump the bundle version to 0.4.1.

### Status
**Ready/Work in progress/Hold**

### Please ensure all the checkboxes:
<!--- Put an `x` in all the boxes that apply, and remove the not
applicable items -->
- [x] Codeformat tests passed locally by running `./runtests.sh
--codeformat`.
- [ ] In-line docstrings updated.
- [ ] Update `version` and `changelog` in `metadata.json` if changing an
existing bundle.
- [ ] Please ensure the naming rules in config files meet our
requirements (please refer to: `CONTRIBUTING.md`).
- [ ] Ensure versions of packages such as `monai`, `pytorch` and `numpy`
are correct in `metadata.json`.
- [ ] Descriptions should be consistent with the content, such as
`eval_metrics` of the provided weights and TorchScript modules.
- [ ] Files larger than 25MB are excluded and replaced by providing
download links in `large_file.yml`.
- [ ] Avoid using paths that contain personal information within config
files (for example, do not use `/home/your_name/` for `"bundle_root"`).

Signed-off-by: Yiheng Wang <vennw@nvidia.com>
  • Loading branch information
yiheng-wang-nv authored Sep 11, 2024
1 parent 4e31c37 commit c315901
Show file tree
Hide file tree
Showing 5 changed files with 71 additions and 48 deletions.
13 changes: 0 additions & 13 deletions ci/run_premerge_gpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -102,19 +102,6 @@ verify_bundle() {
fi
# verify bundle
python $(pwd)/ci/verify_bundle.py --b "$bundle"
# unzip data and do unit tests
DATA_DIR="$(pwd)/models/maisi_ct_generative/datasets"
ZIP_FILE="$DATA_DIR/all_masks_flexible_size_and_spacing_3000.zip"
UNZIP_DIR="$DATA_DIR/all_masks_flexible_size_and_spacing_3000"
if [ -f "$ZIP_FILE" ]; then
if [ ! -d "$UNZIP_DIR" ]; then
echo "Unzipping files for MAISI Bundle..."
unzip $ZIP_FILE -d $DATA_DIR
echo "Unzipping complete."
else
echo "Unzipped content already exists, continuing..."
fi
fi
test_cmd="python $(pwd)/ci/unit_tests/runner.py --b \"$bundle\""
if [ "$dist_flag" = "True" ]; then
test_cmd="torchrun $(pwd)/ci/unit_tests/runner.py --b \"$bundle\" --dist True"
Expand Down
11 changes: 7 additions & 4 deletions ci/verify_bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,13 @@
exclude_verify_shape_list,
exclude_verify_torchscript_list,
)
from monai.bundle import ckpt_export, create_workflow, verify_metadata, verify_net_in_out
from monai.bundle import ckpt_export, verify_metadata, verify_net_in_out
from monai.bundle.config_parser import ConfigParser
from monai.utils.module import optional_import
from utils import download_large_files, get_json_dict

create_workflow, has_create_workflow = optional_import("monai.bundle", name="create_workflow")

# files that must be included in a bundle
necessary_files_list = ["configs/metadata.json", "LICENSE"]
# files that are preferred to be included in a bundle
Expand Down Expand Up @@ -246,11 +249,11 @@ def check_properties(**kwargs):
kwargs.pop("properties_path", None)
print(kwargs)

workflow = create_workflow(**kwargs)
if app_properties_path is not None and os.path.isfile(app_properties_path):
shutil.copy(app_properties_path, "ci/bundle_properties.py")
from bundle_properties import InferProperties, MetaProperties

workflow = create_workflow(**kwargs)
workflow.properties = {**MetaProperties, **InferProperties}
check_result = workflow.check_properties()
if check_result is not None and len(check_result) > 0:
Expand Down Expand Up @@ -287,7 +290,6 @@ def verify_bundle_properties(model_path: str, bundle: str):
if "supported_apps" in metadata:
supported_apps = metadata["supported_apps"]
all_properties = []
print("vista3d sopperted apps: ", supported_apps)
for app, version in supported_apps.items():
properties_path = get_app_properties(app, version)
if properties_path is not None:
Expand All @@ -299,7 +301,8 @@ def verify_bundle_properties(model_path: str, bundle: str):
check_properties(**check_property_args)
print("successfully checked properties.")
else:
check_properties(**check_property_args)
# skip property check if supported_apps is not provided
pass


def verify(bundle, models_path="models", mode="full"):
Expand Down
5 changes: 3 additions & 2 deletions models/maisi_ct_generative/configs/metadata.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
{
"schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_generator_ldm_20240318.json",
"version": "0.4.0",
"version": "0.4.1",
"changelog": {
"0.4.1": "update large files",
"0.4.0": "update to use monai 1.4, model ckpt updated, rm GenerativeAI repo, add quality check",
"0.3.6": "first oss version"
},
Expand All @@ -11,7 +12,7 @@
"optional_packages_version": {
"fire": "0.6.0",
"nibabel": "5.2.1",
"tqdm": "4.66.2"
"tqdm": "4.66.4"
},
"supported_apps": {
"maisi-nim": ""
Expand Down
66 changes: 44 additions & 22 deletions models/maisi_ct_generative/large_files.yml
Original file line number Diff line number Diff line change
@@ -1,23 +1,45 @@
large_files:
- path: "models/autoencoder_epoch273.pt"
url: "https://drive.google.com/file/d/1Ojw25lFO8QbHkxazdK4CgZTyp3GFNZGz/view?usp=sharing"
- path: "models/input_unet3d_data-all_steps1000size512ddpm_random_current_inputx_v1.pt"
url: "https://drive.google.com/file/d/1lklNv4MTdI_9bwFRMd98QQ7JLerR5gC_/view?usp=drive_link"
- path: "models/controlnet-20datasets-e20wl100fold0bc_noi_dia_fsize_current.pt"
url: "https://drive.google.com/file/d/1mLYeqeZ819_WpZPlAInhcWuCIHgn3QNT/view?usp=drive_link"
- path: "models/mask_generation_autoencoder.pt"
url: "https://drive.google.com/file/d/19JnX-C6QAg4RfghTwpPnj4KEWhtawpCy/view?usp=drive_link"
- path: "models/mask_generation_diffusion_unet.pt"
url: "https://drive.google.com/file/d/1yOQvlhXFGY1ZYavADM3N34vgg5AEitda/view?usp=drive_link"
- path: "configs/candidate_masks_flexible_size_and_spacing_3000.json"
url: "https://drive.google.com/file/d/1yMkH-lrAsn2YUGoTuVKNMpicziUmU-1J/view?usp=sharing"
- path: "configs/all_anatomy_size_condtions.json"
url: "https://drive.google.com/file/d/1AJyt1DSoUd2x2AOQOgM7IxeSyo4MXNX0/view?usp=sharing"
- path: "datasets/all_masks_flexible_size_and_spacing_3000.zip"
url: "https://drive.google.com/file/d/16MKsDKkHvDyF2lEir4dzlxwex_GHStUf/view?usp=sharing"
- path: "datasets/IntegrationTest-AbdomenCT.nii.gz"
url: "https://drive.google.com/file/d/1OTgt_dyBgvP52krKRXWXD3u0L5Zbj5JR/view?usp=share_link"
- path: "datasets/C4KC-KiTS_subset.zip"
url: "https://drive.google.com/file/d/1r62pLTowfrHhKW5YPl5pWygIDZSOI-VT/view?usp=sharing"
- path: "datasets/C4KC-KiTS_subset.json"
url: "https://drive.google.com/file/d/1tzpglihyZwlJcuEYJQeuB4zW8UrXyNO3/view?usp=sharing"
- path: "models/autoencoder_epoch273.pt"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_autoencoder_epoch273_alternative.pt"
hash_val: "917cfb1e49631c8a713e3bb7c758fbca"
hash_type: "md5"
- path: "models/input_unet3d_data-all_steps1000size512ddpm_random_current_inputx_v1.pt"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_input_unet3d_data-all_steps1000size512ddpm_random_current_inputx_v1_alternative.pt"
hash_val: "623bd02ff223b70d280cc994fcb70a69"
hash_type: "md5"
- path: "models/controlnet-20datasets-e20wl100fold0bc_noi_dia_fsize_current.pt"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_controlnet-20datasets-e20wl100fold0bc_noi_dia_fsize_current_alternative.pt"
hash_val: "6c36572335372f405a0e85c760fa6dee"
hash_type: "md5"
- path: "models/mask_generation_autoencoder.pt"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/mask_generation_autoencoder.pt"
hash_val: "b177778820f412abc9218cdb7ce3b653"
hash_type: "md5"
- path: "models/mask_generation_diffusion_unet.pt"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_mask_generation_diffusion_unet_v2.pt"
hash_val: "e96310b960a748d68737d190712fbb80"
hash_type: "md5"
- path: "configs/candidate_masks_flexible_size_and_spacing_3000.json"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/candidate_masks_flexible_size_and_spacing_3000.json"
hash_val: "cacb2f92b9f31270b0161412655be62b"
hash_type: "md5"
- path: "configs/all_anatomy_size_condtions.json"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/all_anatomy_size_condtions.json"
hash_val: "fe37057ebac067fb2d17cc8cb921534b"
hash_type: "md5"
- path: "datasets/all_masks_flexible_size_and_spacing_3000.zip"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_all_masks_flexible_size_and_spacing_3000.zip"
hash_val: "081827e9b9f55b1abbeb94682ac52d36"
hash_type: "md5"
- path: "datasets/IntegrationTest-AbdomenCT.nii.gz"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_Case_00047_0000_zs0_pseudo_label_wbdm.nii.gz"
hash_val: "d3e7a212ac5afc51c4b7e3d6c1d6413f"
hash_type: "md5"
- path: "datasets/C4KC-KiTS_subset.zip"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_C4KC-KiTS_subset.zip"
hash_val: "1e2279fd613ddf3a2a5f787180d171ca"
hash_type: "md5"
- path: "datasets/C4KC-KiTS_subset.json"
url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_maisi_C4KC-KiTS_subset.json"
hash_val: "2b8a0aebc31f5dcae5fc496d92e2beb9"
hash_type: "md5"
24 changes: 17 additions & 7 deletions models/maisi_ct_generative/scripts/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,23 @@


def unzip_dataset(dataset_dir):
    """Extract ``<dataset_dir>.zip`` next to ``dataset_dir`` if the directory is missing.

    In a distributed run only rank 0 performs the extraction; every rank then
    waits at a barrier so no process reads the dataset before it is fully
    unzipped.

    Args:
        dataset_dir: target dataset directory. The archive is expected at the
            same path with a ``.zip`` suffix appended.

    Raises:
        ValueError: if ``dataset_dir`` does not exist and the zip archive is
            also missing (raised on rank 0 only).
    """
    if dist.is_available() and dist.is_initialized():
        rank = dist.get_rank()
    else:
        rank = 0

    if rank == 0:
        if not os.path.exists(dataset_dir):
            zip_file_path = dataset_dir + ".zip"
            if not os.path.isfile(zip_file_path):
                # NOTE(review): raising here while other ranks proceed to the
                # barrier below will deadlock them — confirm callers validate
                # the download before entering distributed execution.
                raise ValueError(f"Please download {zip_file_path}.")
            # extract into the parent so the archive's top-level folder lands
            # exactly at dataset_dir
            with zipfile.ZipFile(zip_file_path, "r") as zip_ref:
                zip_ref.extractall(path=os.path.dirname(dataset_dir))
            print(f"Unzipped {zip_file_path} to {dataset_dir}.")

    if dist.is_available() and dist.is_initialized():
        dist.barrier()  # synchronize all processes after rank 0 finishes


Expand Down

0 comments on commit c315901

Please sign in to comment.