-
Notifications
You must be signed in to change notification settings - Fork 12
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Reorganize package dependencies (#506)
* Update dependencies and imports for MONAI and typing * Refactor MedicalImage and MedicalImageFolder classes * allow init of MedicalImage class; raise error in methods * Refactor import_optional_module function to allow importing module attributes * Refactor MedicalImage optional module imports * Update dependencies in pyproject.toml * Add test for MedicalImage feature without MONAI * Prevent use of txrv_transforms method at runtime if MONAI is not installed * Move report package deps to core installation * Adjust package installation tests * Formatting fix * Remove report package test action * Formatting fix * remove txrv_transforms, add dictionary wrapper for torchvision transforms, and remove monai deps from monitor package * fix repr for transform * fix Dictd call func * fix monitor-api notebook * Update imports for image transforms * Update metadata for cxr_classification.ipynb Signed-off-by: Franklin <41602287+fcogidi@users.noreply.github.com> * fix transforms in notebooks --------- Signed-off-by: Franklin <41602287+fcogidi@users.noreply.github.com> Co-authored-by: Amrit K <amritk@vectorinstitute.ai> Co-authored-by: akore <akore0x5f@gmail.com>
- Loading branch information
1 parent
b5ecf98
commit 8a0ef2f
Showing
26 changed files
with
1,268 additions
and
1,115 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,28 +1,84 @@ | ||
"""Transforms for the datasets.""" | ||
from typing import Any, Callable, Tuple | ||
|
||
from typing import Tuple | ||
|
||
from monai.transforms import Lambdad, Resized, ToDeviced # type: ignore | ||
from torchvision.transforms import Compose | ||
|
||
|
||
def txrv_transforms(
    keys: Tuple[str, ...] = ("features",),
    device: str = "cpu",
) -> Compose:
    """Set of transforms for the models in the TXRV library.

    Resizes each keyed array to (1, 224, 224), rescales pixel values from
    [0, 255] into the [-1024, 1024] range the TXRV models expect, and moves
    the result onto ``device``. Missing keys are tolerated at every step.
    """
    resize = Resized(
        keys=keys,
        spatial_size=(1, 224, 224),
        allow_missing_keys=True,
    )
    # ((2 * (x / 255)) - 1) * 1024 maps [0, 255] -> [-1024, 1024]
    rescale = Lambdad(
        keys=keys,
        func=lambda x: ((2 * (x / 255.0)) - 1.0) * 1024,
        allow_missing_keys=True,
    )
    to_device = ToDeviced(keys=keys, device=device, allow_missing_keys=True)
    return Compose([resize, rescale, to_device])
from torchvision.transforms import Lambda, Resize | ||
|
||
|
||
# generic dictionary-based wrapper for any transform
class Dictd:
    """Generic dictionary-based wrapper for any transform.

    Applies ``transform`` to the value stored under each key in ``keys``,
    updating the mapping in place and returning it. When
    ``allow_missing_keys`` is true, absent keys are skipped; otherwise a
    missing key raises ``KeyError``.
    """

    def __init__(
        self,
        transform: Callable[..., Any],
        keys: Tuple[str, ...],
        allow_missing_keys: bool = False,
    ):
        self.transform = transform
        self.keys = keys
        self.allow_missing_keys = allow_missing_keys

    def __call__(self, data: Any) -> Any:
        """Apply the transform to the data."""
        for key in self.keys:
            skip = self.allow_missing_keys and key not in data
            if not skip:
                data[key] = self.transform(data[key])
        return data

    def __repr__(self) -> str:
        """Return a string representation of the transform."""
        cls_name = type(self).__name__
        return (
            f"{cls_name}(transform={self.transform}, "
            f"keys={self.keys}, allow_missing_keys={self.allow_missing_keys})"
        )
|
||
|
||
# dictionary-based wrapper of Lambda transform using Dictd
class Lambdad:
    """Dictionary-based wrapper of Lambda transform using Dictd.

    Wraps ``func`` in a torchvision ``Lambda`` transform and delegates the
    per-key application and missing-key handling to ``Dictd``.
    """

    def __init__(
        self,
        func: Callable[..., Any],
        keys: Tuple[str, ...],
        allow_missing_keys: bool = False,
    ):
        wrapped = Lambda(func)
        self.transform = Dictd(
            transform=wrapped,
            keys=keys,
            allow_missing_keys=allow_missing_keys,
        )

    def __call__(self, data: Any) -> Any:
        """Apply the transform to the data."""
        return self.transform(data)

    def __repr__(self) -> str:
        """Return a string representation of the transform."""
        inner = self.transform
        return (
            f"{type(self).__name__}(keys={inner.keys}, "
            f"allow_missing_keys={inner.allow_missing_keys})"
        )
|
||
|
||
# dictionary-based wrapper of Resize transform using Dictd
class Resized:
    """Dictionary-based wrapper of Resize transform using Dictd.

    Builds a torchvision ``Resize`` transform for ``spatial_size`` and
    delegates the per-key application and missing-key handling to ``Dictd``.
    """

    def __init__(
        self,
        spatial_size: Tuple[int, int],
        keys: Tuple[str, ...],
        allow_missing_keys: bool = False,
    ):
        resize = Resize(size=spatial_size)
        self.transform = Dictd(
            transform=resize,
            keys=keys,
            allow_missing_keys=allow_missing_keys,
        )

    def __call__(self, data: Any) -> Any:
        """Apply the transform to the data."""
        return self.transform(data)

    def __repr__(self) -> str:
        """Return a string representation of the transform."""
        inner = self.transform
        return (
            f"{type(self).__name__}(keys={inner.keys}, "
            f"allow_missing_keys={inner.allow_missing_keys})"
        )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.