Make some methods private (#1050)
Co-authored-by: Cody Baker <51133164+CodyCBakerPhD@users.noreply.github.com>
h-mayorquin and CodyCBakerPhD authored Sep 2, 2024
1 parent 925f183 commit e73745a
Showing 14 changed files with 29 additions and 28 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -1,6 +1,7 @@
# Upcoming

### Deprecations
* The following classes and functions are now private: `NWBMetaDataEncoder`, `NWBSourceDataEncoder`, `check_if_imaging_fits_into_memory`, `NoDatesSafeLoader` [PR #1050](https://github.com/catalystneuro/neuroconv/pull/1050)
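For downstream code, the renames mean the public import paths disappear; a minimal migration sketch (both private names are re-exported in this commit, but the underscore prefix signals they no longer carry a stability guarantee):

```python
# Before this commit (public names):
# from neuroconv.utils import NWBMetaDataEncoder
# from neuroconv.tools.roiextractors import check_if_imaging_fits_into_memory

# After this commit (private names; avoid relying on them outside neuroconv):
from neuroconv.utils import _NWBMetaDataEncoder
from neuroconv.tools.roiextractors import _check_if_imaging_fits_into_memory
```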

### Features

4 changes: 2 additions & 2 deletions src/neuroconv/basedatainterface.py
@@ -19,7 +19,7 @@
)
from .tools.nwb_helpers._metadata_and_file_helpers import _resolve_backend
from .utils import (
NWBMetaDataEncoder,
_NWBMetaDataEncoder,
get_json_schema_from_method_signature,
load_dict_from_file,
)
@@ -63,7 +63,7 @@ def get_metadata(self) -> DeepDict:

def validate_metadata(self, metadata: dict, append_mode: bool = False) -> None:
"""Validate the metadata against the schema."""
encoder = NWBMetaDataEncoder()
encoder = _NWBMetaDataEncoder()
# The encoder produces a serialized object, so we deserialize it for comparison

serialized_metadata = encoder.encode(metadata)
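`validate_metadata` round-trips the metadata through the (now private) encoder so that numpy scalars, datetimes, and similar types are compared as plain JSON values. A minimal sketch of that pattern, using a hypothetical stand-in schema:

```python
import json

from jsonschema import validate

from neuroconv.utils import _NWBMetaDataEncoder

metadata = {"NWBFile": {"session_description": "example session"}}
schema = {"type": "object"}  # hypothetical; the interface supplies its real metadata schema

# Encode to a JSON string, then decode it back so validation sees plain JSON types.
serialized_metadata = _NWBMetaDataEncoder().encode(metadata)
decoded_metadata = json.loads(serialized_metadata)

validate(instance=decoded_metadata, schema=schema)  # raises ValidationError on mismatch
```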
@@ -8,7 +8,7 @@

from .nvt_utils import read_data, read_header
from ....basetemporalalignmentinterface import BaseTemporalAlignmentInterface
from ....utils import DeepDict, NWBMetaDataEncoder, get_base_schema
from ....utils import DeepDict, _NWBMetaDataEncoder, get_base_schema
from ....utils.path import infer_path


@@ -136,7 +136,7 @@ def add_to_nwbfile(
unit="pixels",
conversion=1.0,
timestamps=self.get_timestamps(),
description=f"Pixel x and y coordinates from the .nvt file with header data: {json.dumps(self.header, cls=NWBMetaDataEncoder)}",
description=f"Pixel x and y coordinates from the .nvt file with header data: {json.dumps(self.header, cls=_NWBMetaDataEncoder)}",
)

nwbfile.add_acquisition(Position([spatial_series], name="NvtPosition"))
@@ -151,7 +151,7 @@ def add_to_nwbfile(
unit="degrees",
conversion=1.0,
timestamps=spatial_series if add_position else self.get_timestamps(),
description=f"Angle from the .nvt file with header data: {json.dumps(self.header, cls=NWBMetaDataEncoder)}",
description=f"Angle from the .nvt file with header data: {json.dumps(self.header, cls=_NWBMetaDataEncoder)}",
),
name="NvtCompassDirection",
)
@@ -3,7 +3,7 @@
from pydantic import FilePath

from ..baserecordingextractorinterface import BaseRecordingExtractorInterface
from ....utils.json_schema import NWBMetaDataEncoder
from ....utils.json_schema import _NWBMetaDataEncoder


class MEArecRecordingInterface(BaseRecordingExtractorInterface):
@@ -61,7 +61,7 @@ def get_metadata(self) -> dict:
for unneeded_key in ["fs", "dtype"]:
recording_metadata.pop(unneeded_key)
metadata["Ecephys"].update(
{self.es_key: dict(name=self.es_key, description=json.dumps(recording_metadata, cls=NWBMetaDataEncoder))}
{self.es_key: dict(name=self.es_key, description=json.dumps(recording_metadata, cls=_NWBMetaDataEncoder))}
)

return metadata
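The pattern above embeds the extractor's recording metadata as a JSON string in the `ElectricalSeries` description. A small sketch with hypothetical values, assuming the encoder converts numpy scalars to native Python types (its array handling, confirmed by the tests below, suggests it does):

```python
import json

import numpy as np

from neuroconv.utils import _NWBMetaDataEncoder

# Hypothetical recording metadata; the real values come from the MEArec file.
recording_metadata = {"noise_level": np.float64(10.0), "n_channels": np.int64(32)}

description = json.dumps(recording_metadata, cls=_NWBMetaDataEncoder)
print(description)  # {"noise_level": 10.0, "n_channels": 32}
```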
6 changes: 3 additions & 3 deletions src/neuroconv/nwbconverter.py
@@ -29,7 +29,7 @@
unroot_schema,
)
from .utils.dict import DeepDict
from .utils.json_schema import NWBMetaDataEncoder, NWBSourceDataEncoder
from .utils.json_schema import _NWBMetaDataEncoder, _NWBSourceDataEncoder


class NWBConverter:
@@ -63,7 +63,7 @@ def validate_source(cls, source_data: dict[str, dict], verbose: bool = True):

def _validate_source_data(self, source_data: dict[str, dict], verbose: bool = True):

encoder = NWBSourceDataEncoder()
encoder = _NWBSourceDataEncoder()
# The encoder produces a serialized object, so we deserialize it for comparison

serialized_source_data = encoder.encode(source_data)
@@ -104,7 +104,7 @@ def get_metadata(self) -> DeepDict:

def validate_metadata(self, metadata: dict[str, dict], append_mode: bool = False):
"""Validate metadata against Converter metadata_schema."""
encoder = NWBMetaDataEncoder()
encoder = _NWBMetaDataEncoder()
# The encoder produces a serialized object, so we deserialize it for comparison
serialized_metadata = encoder.encode(metadata)
decoded_metadata = json.loads(serialized_metadata)
2 changes: 1 addition & 1 deletion src/neuroconv/tools/roiextractors/__init__.py
@@ -1,5 +1,5 @@
from .roiextractors import (
check_if_imaging_fits_into_memory,
_check_if_imaging_fits_into_memory,
get_nwb_imaging_metadata,
get_nwb_segmentation_metadata,
add_background_fluorescence_traces,
4 changes: 2 additions & 2 deletions src/neuroconv/tools/roiextractors/roiextractors.py
@@ -558,7 +558,7 @@ def add_photon_series_to_nwbfile(
return nwbfile


def check_if_imaging_fits_into_memory(imaging: ImagingExtractor) -> None:
def _check_if_imaging_fits_into_memory(imaging: ImagingExtractor) -> None:
"""
Raise an error if the full traces of an imaging extractor are larger than available memory.
@@ -625,7 +625,7 @@ def data_generator(imaging):
iterator_options = dict() if iterator_options is None else iterator_options

if iterator_type is None:
check_if_imaging_fits_into_memory(imaging=imaging)
_check_if_imaging_fits_into_memory(imaging=imaging)
return imaging.get_video().transpose((0, 2, 1))

if iterator_type == "v1":
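The diff shows only the renamed signature of `_check_if_imaging_fits_into_memory`; a plausible sketch of what such a check does, built on the standard `ImagingExtractor` accessors (illustrative, not the library's exact implementation):

```python
import psutil
from roiextractors import ImagingExtractor


def check_if_imaging_fits_into_memory_sketch(imaging: ImagingExtractor) -> None:
    """Raise MemoryError if loading the full video would exceed available memory (illustrative)."""
    num_frames = imaging.get_num_frames()
    rows, columns = imaging.get_image_size()
    itemsize = imaging.get_dtype().itemsize
    required_bytes = num_frames * rows * columns * itemsize

    available_bytes = psutil.virtual_memory().available
    if required_bytes > available_bytes:
        raise MemoryError(
            f"Memory error, full TwoPhotonSeries data is {required_bytes / 1e9:.2f} GB but "
            f"only {available_bytes / 1e9:.2f} GB are available! Please use iterator_type='v2'."
        )
```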
4 changes: 2 additions & 2 deletions src/neuroconv/tools/testing/data_interface_mixins.py
@@ -33,7 +33,7 @@
configure_backend,
get_default_backend_configuration,
)
from neuroconv.utils import NWBMetaDataEncoder
from neuroconv.utils import _NWBMetaDataEncoder


class DataInterfaceTestMixin:
@@ -98,7 +98,7 @@ def check_metadata(self):
if "session_start_time" not in metadata["NWBFile"]:
metadata["NWBFile"].update(session_start_time=datetime.now().astimezone())
# handle json encoding of datetimes and other tricky types
metadata_for_validation = json.loads(json.dumps(metadata, cls=NWBMetaDataEncoder))
metadata_for_validation = json.loads(json.dumps(metadata, cls=_NWBMetaDataEncoder))
validate(metadata_for_validation, schema)
self.check_extracted_metadata(metadata)

2 changes: 1 addition & 1 deletion src/neuroconv/utils/__init__.py
@@ -7,7 +7,7 @@
load_dict_from_file,
)
from .json_schema import (
NWBMetaDataEncoder,
_NWBMetaDataEncoder,
fill_defaults,
get_base_schema,
get_metadata_schema_for_icephys,
6 changes: 3 additions & 3 deletions src/neuroconv/utils/dict.py
@@ -12,7 +12,7 @@
from pydantic import FilePath


class NoDatesSafeLoader(yaml.SafeLoader):
class _NoDatesSafeLoader(yaml.SafeLoader):
"""Custom override of yaml Loader class for datetime considerations."""

@classmethod
@@ -33,7 +33,7 @@ def remove_implicit_resolver(cls, tag_to_remove):
]


NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp")
_NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp")


def load_dict_from_file(file_path: FilePath) -> dict:
@@ -44,7 +44,7 @@ def load_dict_from_file(file_path: FilePath) -> dict:

if file_path.suffix in (".yml", ".yaml"):
with open(file=file_path, mode="r") as stream:
dictionary = yaml.load(stream=stream, Loader=NoDatesSafeLoader)
dictionary = yaml.load(stream=stream, Loader=_NoDatesSafeLoader)
elif file_path.suffix == ".json":
with open(file=file_path, mode="r") as fp:
dictionary = json.load(fp=fp)
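Removing the implicit timestamp resolver makes ISO-formatted dates load as plain strings rather than `datetime` objects, which keeps the loaded dictionaries JSON-serializable. A minimal sketch of the difference, assuming PyYAML is installed:

```python
import yaml

from neuroconv.utils.dict import _NoDatesSafeLoader

document = "session_start_time: 2024-09-02 12:00:00"

with_dates = yaml.load(document, Loader=yaml.SafeLoader)
without_dates = yaml.load(document, Loader=_NoDatesSafeLoader)

print(type(with_dates["session_start_time"]))     # <class 'datetime.datetime'>
print(type(without_dates["session_start_time"]))  # <class 'str'>
```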
6 changes: 3 additions & 3 deletions src/neuroconv/utils/json_schema.py
@@ -16,7 +16,7 @@
from pynwb.icephys import IntracellularElectrode


class NWBMetaDataEncoder(json.JSONEncoder):
class _NWBMetaDataEncoder(json.JSONEncoder):
"""
Custom JSON encoder for NWB metadata.
@@ -43,7 +43,7 @@ def default(self, obj):
return super().default(obj)


class NWBSourceDataEncoder(NWBMetaDataEncoder):
class _NWBSourceDataEncoder(_NWBMetaDataEncoder):
"""
Custom JSON encoder for data interface source data (i.e. kwargs).
@@ -350,7 +350,7 @@ def get_metadata_schema_for_icephys():

def validate_metadata(metadata: dict[str, dict], schema: dict[str, dict], verbose: bool = False):
"""Validate metadata against a schema."""
encoder = NWBMetaDataEncoder()
encoder = _NWBMetaDataEncoder()
# The encoder produces a serialized object, so we deserialize it for comparison

serialized_metadata = encoder.encode(metadata)
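The hunk above elides the body of `default`; a plausible sketch of the numpy handling it implies (the `test_np_array_encoding` test below confirms the array branch; the scalar branch is an assumption):

```python
import json

import numpy as np


class NWBMetaDataEncoderSketch(json.JSONEncoder):
    """Illustrative stand-in for neuroconv's private encoder, not the real implementation."""

    def default(self, obj):
        # numpy arrays become plain lists...
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        # ...and numpy scalars become native Python values (assumed behavior).
        if isinstance(obj, np.generic):
            return obj.item()
        return super().default(obj)


assert json.dumps(np.array([1, 2, 3]), cls=NWBMetaDataEncoderSketch) == "[1, 2, 3]"
```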
4 changes: 2 additions & 2 deletions tests/test_minimal/test_tools/test_expand_paths.py
@@ -9,7 +9,7 @@
from neuroconv.tools import LocalPathExpander
from neuroconv.tools.path_expansion import construct_path_template
from neuroconv.tools.testing import generate_path_expander_demo_ibl
from neuroconv.utils import NWBMetaDataEncoder
from neuroconv.utils import _NWBMetaDataEncoder


def create_test_directories_and_files(
@@ -409,7 +409,7 @@ def test_expand_paths_ibl(tmpdir):
),
),
)
path_expansion_results = json.loads(json.dumps(path_expansion_results, cls=NWBMetaDataEncoder))
path_expansion_results = json.loads(json.dumps(path_expansion_results, cls=_NWBMetaDataEncoder))

# build expected output from file
expected_file_path = Path(__file__).parent / "expand_paths_ibl_expected.json"
4 changes: 2 additions & 2 deletions tests/test_minimal/test_utils/test_json_schema_utils.py
@@ -6,7 +6,7 @@
from pynwb.ophys import ImagingPlane, TwoPhotonSeries

from neuroconv.utils import (
NWBMetaDataEncoder,
_NWBMetaDataEncoder,
dict_deep_update,
fill_defaults,
get_schema_from_hdmf_class,
@@ -204,5 +204,5 @@ def test_get_schema_from_TwoPhotonSeries_array_type():

def test_np_array_encoding():
np_array = np.array([1, 2, 3])
encoded = json.dumps(np_array, cls=NWBMetaDataEncoder)
encoded = json.dumps(np_array, cls=_NWBMetaDataEncoder)
assert encoded == "[1, 2, 3]"
4 changes: 2 additions & 2 deletions tests/test_ophys/test_tools_roiextractors.py
@@ -27,14 +27,14 @@

from neuroconv.tools.nwb_helpers import get_module
from neuroconv.tools.roiextractors import (
_check_if_imaging_fits_into_memory,
add_devices_to_nwbfile,
add_fluorescence_traces_to_nwbfile,
add_image_segmentation_to_nwbfile,
add_imaging_plane_to_nwbfile,
add_photon_series_to_nwbfile,
add_plane_segmentation_to_nwbfile,
add_summary_images_to_nwbfile,
check_if_imaging_fits_into_memory,
)
from neuroconv.tools.roiextractors.imagingextractordatachunkiterator import (
ImagingExtractorDataChunkIterator,
@@ -1539,7 +1539,7 @@ def test_non_iterative_write_assertion(self):
reg_expression = "Memory error, full TwoPhotonSeries data is (.*?) are available! Please use iterator_type='v2'"

with self.assertRaisesRegex(MemoryError, reg_expression):
check_if_imaging_fits_into_memory(imaging=mock_imaging)
_check_if_imaging_fits_into_memory(imaging=mock_imaging)

def test_non_iterative_two_photon(self):
"""Test adding two photon series with using DataChunkIterator as iterator type."""
