diff --git a/CHANGELOG.md b/CHANGELOG.md index 5fe29befe..52c1ebb9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Upcoming ### Deprecations +* The following classes and objects are now private `NWBMetaDataEncoder`, `NWBSourceDataEncoder`, `check_if_imaging_fits_into_memory`, `NoDatesSafeLoader` [PR #1050](https://github.com/catalystneuro/neuroconv/pull/1050) ### Features diff --git a/src/neuroconv/basedatainterface.py b/src/neuroconv/basedatainterface.py index 4e0a0aac4..f8fce62b0 100644 --- a/src/neuroconv/basedatainterface.py +++ b/src/neuroconv/basedatainterface.py @@ -19,7 +19,7 @@ ) from .tools.nwb_helpers._metadata_and_file_helpers import _resolve_backend from .utils import ( - NWBMetaDataEncoder, + _NWBMetaDataEncoder, get_json_schema_from_method_signature, load_dict_from_file, ) @@ -63,7 +63,7 @@ def get_metadata(self) -> DeepDict: def validate_metadata(self, metadata: dict, append_mode: bool = False) -> None: """Validate the metadata against the schema.""" - encoder = NWBMetaDataEncoder() + encoder = _NWBMetaDataEncoder() # The encoder produces a serialized object, so we deserialized it for comparison serialized_metadata = encoder.encode(metadata) diff --git a/src/neuroconv/datainterfaces/behavior/neuralynx/neuralynx_nvt_interface.py b/src/neuroconv/datainterfaces/behavior/neuralynx/neuralynx_nvt_interface.py index 51e04821d..f01d11e53 100644 --- a/src/neuroconv/datainterfaces/behavior/neuralynx/neuralynx_nvt_interface.py +++ b/src/neuroconv/datainterfaces/behavior/neuralynx/neuralynx_nvt_interface.py @@ -8,7 +8,7 @@ from .nvt_utils import read_data, read_header from ....basetemporalalignmentinterface import BaseTemporalAlignmentInterface -from ....utils import DeepDict, NWBMetaDataEncoder, get_base_schema +from ....utils import DeepDict, _NWBMetaDataEncoder, get_base_schema from ....utils.path import infer_path @@ -136,7 +136,7 @@ def add_to_nwbfile( unit="pixels", conversion=1.0, timestamps=self.get_timestamps(), - description=f"Pixel x 
and y coordinates from the .nvt file with header data: {json.dumps(self.header, cls=NWBMetaDataEncoder)}", + description=f"Pixel x and y coordinates from the .nvt file with header data: {json.dumps(self.header, cls=_NWBMetaDataEncoder)}", ) nwbfile.add_acquisition(Position([spatial_series], name="NvtPosition")) @@ -151,7 +151,7 @@ def add_to_nwbfile( unit="degrees", conversion=1.0, timestamps=spatial_series if add_position else self.get_timestamps(), - description=f"Angle from the .nvt file with header data: {json.dumps(self.header, cls=NWBMetaDataEncoder)}", + description=f"Angle from the .nvt file with header data: {json.dumps(self.header, cls=_NWBMetaDataEncoder)}", ), name="NvtCompassDirection", ) diff --git a/src/neuroconv/datainterfaces/ecephys/mearec/mearecdatainterface.py b/src/neuroconv/datainterfaces/ecephys/mearec/mearecdatainterface.py index 9d3797e0f..7a82025ca 100644 --- a/src/neuroconv/datainterfaces/ecephys/mearec/mearecdatainterface.py +++ b/src/neuroconv/datainterfaces/ecephys/mearec/mearecdatainterface.py @@ -3,7 +3,7 @@ from pydantic import FilePath from ..baserecordingextractorinterface import BaseRecordingExtractorInterface -from ....utils.json_schema import NWBMetaDataEncoder +from ....utils.json_schema import _NWBMetaDataEncoder class MEArecRecordingInterface(BaseRecordingExtractorInterface): @@ -61,7 +61,7 @@ def get_metadata(self) -> dict: for unneeded_key in ["fs", "dtype"]: recording_metadata.pop(unneeded_key) metadata["Ecephys"].update( - {self.es_key: dict(name=self.es_key, description=json.dumps(recording_metadata, cls=NWBMetaDataEncoder))} + {self.es_key: dict(name=self.es_key, description=json.dumps(recording_metadata, cls=_NWBMetaDataEncoder))} ) return metadata diff --git a/src/neuroconv/nwbconverter.py b/src/neuroconv/nwbconverter.py index cb62e149d..eabf6d772 100644 --- a/src/neuroconv/nwbconverter.py +++ b/src/neuroconv/nwbconverter.py @@ -29,7 +29,7 @@ unroot_schema, ) from .utils.dict import DeepDict -from .utils.json_schema 
import NWBMetaDataEncoder, NWBSourceDataEncoder +from .utils.json_schema import _NWBMetaDataEncoder, _NWBSourceDataEncoder class NWBConverter: @@ -63,7 +63,7 @@ def validate_source(cls, source_data: dict[str, dict], verbose: bool = True): def _validate_source_data(self, source_data: dict[str, dict], verbose: bool = True): - encoder = NWBSourceDataEncoder() + encoder = _NWBSourceDataEncoder() # The encoder produces a serialized object, so we deserialized it for comparison serialized_source_data = encoder.encode(source_data) @@ -104,7 +104,7 @@ def get_metadata(self) -> DeepDict: def validate_metadata(self, metadata: dict[str, dict], append_mode: bool = False): """Validate metadata against Converter metadata_schema.""" - encoder = NWBMetaDataEncoder() + encoder = _NWBMetaDataEncoder() # The encoder produces a serialized object, so we deserialized it for comparison serialized_metadata = encoder.encode(metadata) decoded_metadata = json.loads(serialized_metadata) diff --git a/src/neuroconv/tools/roiextractors/__init__.py b/src/neuroconv/tools/roiextractors/__init__.py index 5e009fe6d..181bbe38c 100644 --- a/src/neuroconv/tools/roiextractors/__init__.py +++ b/src/neuroconv/tools/roiextractors/__init__.py @@ -1,5 +1,5 @@ from .roiextractors import ( - check_if_imaging_fits_into_memory, + _check_if_imaging_fits_into_memory, get_nwb_imaging_metadata, get_nwb_segmentation_metadata, add_background_fluorescence_traces, diff --git a/src/neuroconv/tools/roiextractors/roiextractors.py b/src/neuroconv/tools/roiextractors/roiextractors.py index 4da660914..618d30b4a 100644 --- a/src/neuroconv/tools/roiextractors/roiextractors.py +++ b/src/neuroconv/tools/roiextractors/roiextractors.py @@ -558,7 +558,7 @@ def add_photon_series_to_nwbfile( return nwbfile -def check_if_imaging_fits_into_memory(imaging: ImagingExtractor) -> None: +def _check_if_imaging_fits_into_memory(imaging: ImagingExtractor) -> None: """ Raise an error if the full traces of an imaging extractor are larger than 
available memory. @@ -625,7 +625,7 @@ def data_generator(imaging): iterator_options = dict() if iterator_options is None else iterator_options if iterator_type is None: - check_if_imaging_fits_into_memory(imaging=imaging) + _check_if_imaging_fits_into_memory(imaging=imaging) return imaging.get_video().transpose((0, 2, 1)) if iterator_type == "v1": diff --git a/src/neuroconv/tools/testing/data_interface_mixins.py b/src/neuroconv/tools/testing/data_interface_mixins.py index 9f94091d4..24042feee 100644 --- a/src/neuroconv/tools/testing/data_interface_mixins.py +++ b/src/neuroconv/tools/testing/data_interface_mixins.py @@ -33,7 +33,7 @@ configure_backend, get_default_backend_configuration, ) -from neuroconv.utils import NWBMetaDataEncoder +from neuroconv.utils import _NWBMetaDataEncoder class DataInterfaceTestMixin: @@ -98,7 +98,7 @@ def check_metadata(self): if "session_start_time" not in metadata["NWBFile"]: metadata["NWBFile"].update(session_start_time=datetime.now().astimezone()) # handle json encoding of datetimes and other tricky types - metadata_for_validation = json.loads(json.dumps(metadata, cls=NWBMetaDataEncoder)) + metadata_for_validation = json.loads(json.dumps(metadata, cls=_NWBMetaDataEncoder)) validate(metadata_for_validation, schema) self.check_extracted_metadata(metadata) diff --git a/src/neuroconv/utils/__init__.py b/src/neuroconv/utils/__init__.py index 1670eb60f..f59cf59c5 100644 --- a/src/neuroconv/utils/__init__.py +++ b/src/neuroconv/utils/__init__.py @@ -7,7 +7,7 @@ load_dict_from_file, ) from .json_schema import ( - NWBMetaDataEncoder, + _NWBMetaDataEncoder, fill_defaults, get_base_schema, get_metadata_schema_for_icephys, diff --git a/src/neuroconv/utils/dict.py b/src/neuroconv/utils/dict.py index 0a92520f7..f0507b653 100644 --- a/src/neuroconv/utils/dict.py +++ b/src/neuroconv/utils/dict.py @@ -12,7 +12,7 @@ from pydantic import FilePath -class NoDatesSafeLoader(yaml.SafeLoader): +class _NoDatesSafeLoader(yaml.SafeLoader): """Custom override 
of yaml Loader class for datetime considerations.""" @classmethod @@ -33,7 +33,7 @@ def remove_implicit_resolver(cls, tag_to_remove): ] -NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp") +_NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp") def load_dict_from_file(file_path: FilePath) -> dict: @@ -44,7 +44,7 @@ def load_dict_from_file(file_path: FilePath) -> dict: if file_path.suffix in (".yml", ".yaml"): with open(file=file_path, mode="r") as stream: - dictionary = yaml.load(stream=stream, Loader=NoDatesSafeLoader) + dictionary = yaml.load(stream=stream, Loader=_NoDatesSafeLoader) elif file_path.suffix == ".json": with open(file=file_path, mode="r") as fp: dictionary = json.load(fp=fp) diff --git a/src/neuroconv/utils/json_schema.py b/src/neuroconv/utils/json_schema.py index 1e8d5d4d4..6c1ba7245 100644 --- a/src/neuroconv/utils/json_schema.py +++ b/src/neuroconv/utils/json_schema.py @@ -16,7 +16,7 @@ from pynwb.icephys import IntracellularElectrode -class NWBMetaDataEncoder(json.JSONEncoder): +class _NWBMetaDataEncoder(json.JSONEncoder): """ Custom JSON encoder for NWB metadata. @@ -43,7 +43,7 @@ def default(self, obj): return super().default(obj) -class NWBSourceDataEncoder(NWBMetaDataEncoder): +class _NWBSourceDataEncoder(_NWBMetaDataEncoder): """ Custom JSON encoder for data interface source data (i.e. kwargs). 
@@ -350,7 +350,7 @@ def get_metadata_schema_for_icephys(): def validate_metadata(metadata: dict[str, dict], schema: dict[str, dict], verbose: bool = False): """Validate metadata against a schema.""" - encoder = NWBMetaDataEncoder() + encoder = _NWBMetaDataEncoder() # The encoder produces a serialized object, so we deserialized it for comparison serialized_metadata = encoder.encode(metadata) diff --git a/tests/test_minimal/test_tools/test_expand_paths.py b/tests/test_minimal/test_tools/test_expand_paths.py index 2667602d5..9e7f03631 100644 --- a/tests/test_minimal/test_tools/test_expand_paths.py +++ b/tests/test_minimal/test_tools/test_expand_paths.py @@ -9,7 +9,7 @@ from neuroconv.tools import LocalPathExpander from neuroconv.tools.path_expansion import construct_path_template from neuroconv.tools.testing import generate_path_expander_demo_ibl -from neuroconv.utils import NWBMetaDataEncoder +from neuroconv.utils import _NWBMetaDataEncoder def create_test_directories_and_files( @@ -409,7 +409,7 @@ def test_expand_paths_ibl(tmpdir): ), ), ) - path_expansion_results = json.loads(json.dumps(path_expansion_results, cls=NWBMetaDataEncoder)) + path_expansion_results = json.loads(json.dumps(path_expansion_results, cls=_NWBMetaDataEncoder)) # build expected output from file expected_file_path = Path(__file__).parent / "expand_paths_ibl_expected.json" diff --git a/tests/test_minimal/test_utils/test_json_schema_utils.py b/tests/test_minimal/test_utils/test_json_schema_utils.py index be03a2699..4edf1e724 100644 --- a/tests/test_minimal/test_utils/test_json_schema_utils.py +++ b/tests/test_minimal/test_utils/test_json_schema_utils.py @@ -6,7 +6,7 @@ from pynwb.ophys import ImagingPlane, TwoPhotonSeries from neuroconv.utils import ( - NWBMetaDataEncoder, + _NWBMetaDataEncoder, dict_deep_update, fill_defaults, get_schema_from_hdmf_class, @@ -204,5 +204,5 @@ def test_get_schema_from_TwoPhotonSeries_array_type(): def test_np_array_encoding(): np_array = np.array([1, 2, 3]) - 
encoded = json.dumps(np_array, cls=NWBMetaDataEncoder) + encoded = json.dumps(np_array, cls=_NWBMetaDataEncoder) assert encoded == "[1, 2, 3]" diff --git a/tests/test_ophys/test_tools_roiextractors.py b/tests/test_ophys/test_tools_roiextractors.py index 60162527b..f750a2a40 100644 --- a/tests/test_ophys/test_tools_roiextractors.py +++ b/tests/test_ophys/test_tools_roiextractors.py @@ -27,6 +27,7 @@ from neuroconv.tools.nwb_helpers import get_module from neuroconv.tools.roiextractors import ( + _check_if_imaging_fits_into_memory, add_devices_to_nwbfile, add_fluorescence_traces_to_nwbfile, add_image_segmentation_to_nwbfile, @@ -34,7 +35,6 @@ add_photon_series_to_nwbfile, add_plane_segmentation_to_nwbfile, add_summary_images_to_nwbfile, - check_if_imaging_fits_into_memory, ) from neuroconv.tools.roiextractors.imagingextractordatachunkiterator import ( ImagingExtractorDataChunkIterator, @@ -1539,7 +1539,7 @@ def test_non_iterative_write_assertion(self): reg_expression = "Memory error, full TwoPhotonSeries data is (.*?) are available! Please use iterator_type='v2'" with self.assertRaisesRegex(MemoryError, reg_expression): - check_if_imaging_fits_into_memory(imaging=mock_imaging) + _check_if_imaging_fits_into_memory(imaging=mock_imaging) def test_non_iterative_two_photon(self): """Test adding two photon series with using DataChunkIterator as iterator type."""