Skip to content

Commit bd77cd9

Browse files
committed
Revert "formatting changes"
This reverts the formatting changes that were accidentally included with commit 0c21a73.
1 parent 0c21a73 commit bd77cd9

File tree

2 files changed: +154 additions, −54 deletions

iohub/ngff/nodes.py

Lines changed: 93 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,9 @@ def _open_store(
5858
synchronizer=None,
5959
):
6060
if not os.path.isdir(store_path) and mode in ("r", "r+"):
61-
raise FileNotFoundError(f"Dataset directory not found at {store_path}.")
61+
raise FileNotFoundError(
62+
f"Dataset directory not found at {store_path}."
63+
)
6264
if version != "0.4":
6365
_logger.warning(
6466
"IOHub is only tested against OME-NGFF v0.4. "
@@ -68,10 +70,14 @@ def _open_store(
6870
else:
6971
dimension_separator = "/"
7072
try:
71-
store = zarr.DirectoryStore(store_path, dimension_separator=dimension_separator)
73+
store = zarr.DirectoryStore(
74+
store_path, dimension_separator=dimension_separator
75+
)
7276
root = zarr.open_group(store, mode=mode, synchronizer=synchronizer)
7377
except Exception as e:
74-
raise RuntimeError(f"Cannot open Zarr root group at {store_path}") from e
78+
raise RuntimeError(
79+
f"Cannot open Zarr root group at {store_path}"
80+
) from e
7581
return root
7682

7783

@@ -102,7 +108,9 @@ def __init__(
102108
if channel_names:
103109
self._channel_names = channel_names
104110
elif not parse_meta:
105-
raise ValueError("Channel names need to be provided or in metadata.")
111+
raise ValueError(
112+
"Channel names need to be provided or in metadata."
113+
)
106114
if axes:
107115
self.axes = axes
108116
self._group = group
@@ -533,8 +541,12 @@ def _parse_meta(self):
533541
omero = self.zattrs.get("omero")
534542
if multiscales and omero:
535543
try:
536-
self.metadata = ImagesMeta(multiscales=multiscales, omero=omero)
537-
self._channel_names = [c.label for c in self.metadata.omero.channels]
544+
self.metadata = ImagesMeta(
545+
multiscales=multiscales, omero=omero
546+
)
547+
self._channel_names = [
548+
c.label for c in self.metadata.omero.channels
549+
]
538550
self.axes = self.metadata.multiscales[0].axes
539551
except ValidationError:
540552
self._warn_invalid_meta()
@@ -548,7 +560,9 @@ def dump_meta(self):
548560
@property
549561
def _storage_options(self):
550562
return {
551-
"compressor": Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE),
563+
"compressor": Blosc(
564+
cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE
565+
),
552566
"overwrite": self._overwrite,
553567
}
554568

@@ -585,7 +599,8 @@ def data(self):
585599
return self["0"]
586600
except KeyError:
587601
raise KeyError(
588-
"There is no array named '0' " f"in the group of: {self.array_keys()}"
602+
"There is no array named '0' "
603+
f"in the group of: {self.array_keys()}"
589604
)
590605

591606
def __getitem__(self, key: int | str) -> ImageArray:
@@ -609,7 +624,9 @@ def __setitem__(self, key, value: NDArray):
609624
"""Write an up-to-5D image with default settings."""
610625
key = normalize_storage_path(key)
611626
if not isinstance(value, np.ndarray):
612-
raise TypeError(f"Value must be a NumPy array. Got type {type(value)}.")
627+
raise TypeError(
628+
f"Value must be a NumPy array. Got type {type(value)}."
629+
)
613630
self.create_image(key, value)
614631

615632
def images(self) -> Generator[tuple[str, ImageArray]]:
@@ -659,7 +676,9 @@ def create_image(
659676
if check_shape:
660677
self._check_shape(data.shape)
661678
img_arr = ImageArray(
662-
self._group.array(name, data, chunks=chunks, **self._storage_options)
679+
self._group.array(
680+
name, data, chunks=chunks, **self._storage_options
681+
)
663682
)
664683
self._create_image_meta(img_arr.basename, transform=transform)
665684
return img_arr
@@ -742,7 +761,8 @@ def _check_shape(self, data_shape: tuple[int]):
742761
_logger.warning(msg)
743762
else:
744763
_logger.info(
745-
"Dataset channel axis is not set. " "Skipping channel shape check."
764+
"Dataset channel axis is not set. "
765+
"Skipping channel shape check."
746766
)
747767

748768
def _create_image_meta(
@@ -753,7 +773,9 @@ def _create_image_meta(
753773
):
754774
if not transform:
755775
transform = [TransformationMeta(type="identity")]
756-
dataset_meta = DatasetMeta(path=name, coordinate_transformations=transform)
776+
dataset_meta = DatasetMeta(
777+
path=name, coordinate_transformations=transform
778+
)
757779
if not hasattr(self, "metadata"):
758780
self.metadata = ImagesMeta(
759781
multiscales=[
@@ -762,13 +784,18 @@ def _create_image_meta(
762784
axes=self.axes,
763785
datasets=[dataset_meta],
764786
name=name,
765-
coordinateTransformations=[TransformationMeta(type="identity")],
787+
coordinateTransformations=[
788+
TransformationMeta(type="identity")
789+
],
766790
metadata=extra_meta,
767791
)
768792
],
769793
omero=self._omero_meta(id=0, name=self._group.basename),
770794
)
771-
elif dataset_meta.path not in self.metadata.multiscales[0].get_dataset_paths():
795+
elif (
796+
dataset_meta.path
797+
not in self.metadata.multiscales[0].get_dataset_paths()
798+
):
772799
self.metadata.multiscales[0].datasets.append(dataset_meta)
773800
self.dump_meta()
774801

@@ -781,11 +808,15 @@ def _omero_meta(
781808
if not clims:
782809
clims = [None] * len(self.channel_names)
783810
channels = []
784-
for i, (channel_name, clim) in enumerate(zip(self.channel_names, clims)):
811+
for i, (channel_name, clim) in enumerate(
812+
zip(self.channel_names, clims)
813+
):
785814
if i == 0:
786815
first_chan = True
787816
channels.append(
788-
channel_display_settings(channel_name, clim=clim, first_chan=first_chan)
817+
channel_display_settings(
818+
channel_name, clim=clim, first_chan=first_chan
819+
)
789820
)
790821
omero_meta = OMEROMeta(
791822
version=self.version,
@@ -805,7 +836,8 @@ def _find_axis(self, axis_type):
805836
def _get_channel_axis(self):
806837
if (ch_ax := self._find_axis("channel")) is None:
807838
raise KeyError(
808-
"Axis 'channel' does not exist. " "Please update `self.axes` first."
839+
"Axis 'channel' does not exist. "
840+
"Please update `self.axes` first."
809841
)
810842
else:
811843
return ch_ax
@@ -834,10 +866,14 @@ def append_channel(self, chan_name: str, resize_arrays: bool = True):
834866
elif ch_ax == len(shape):
835867
shape = _pad_shape(tuple(shape), target=len(shape) + 1)
836868
else:
837-
raise IndexError(f"Cannot infer channel axis for shape {shape}.")
869+
raise IndexError(
870+
f"Cannot infer channel axis for shape {shape}."
871+
)
838872
img.resize(shape)
839873
if "omero" in self.metadata.model_dump().keys():
840-
self.metadata.omero.channels.append(channel_display_settings(chan_name))
874+
self.metadata.omero.channels.append(
875+
channel_display_settings(chan_name)
876+
)
841877
self.dump_meta()
842878

843879
def rename_channel(self, old: str, new: str):
@@ -901,12 +937,18 @@ def initialize_pyramid(self, levels: int) -> None:
901937
for level in range(1, levels):
902938
factor = 2**level
903939

904-
shape = array.shape[:-3] + _scale_integers(array.shape[-3:], factor)
940+
shape = array.shape[:-3] + _scale_integers(
941+
array.shape[-3:], factor
942+
)
905943

906-
chunks = _pad_shape(_scale_integers(array.chunks, factor), len(shape))
944+
chunks = _pad_shape(
945+
_scale_integers(array.chunks, factor), len(shape)
946+
)
907947

908948
transforms = deepcopy(
909-
self.metadata.multiscales[0].datasets[0].coordinate_transformations
949+
self.metadata.multiscales[0]
950+
.datasets[0]
951+
.coordinate_transformations
910952
)
911953
for tr in transforms:
912954
if tr.type == "scale":
@@ -928,7 +970,9 @@ def scale(self) -> list[float]:
928970
highest resolution scale.
929971
"""
930972
scale = [1] * self.data.ndim
931-
transforms = self.metadata.multiscales[0].datasets[0].coordinate_transformations
973+
transforms = (
974+
self.metadata.multiscales[0].datasets[0].coordinate_transformations
975+
)
932976
for trans in transforms:
933977
if trans.type == "scale":
934978
if len(trans.scale) != len(scale):
@@ -946,7 +990,9 @@ def axis_names(self) -> list[str]:
946990
947991
Returns lowercase axis names.
948992
"""
949-
return [axis.name.lower() for axis in self.metadata.multiscales[0].axes]
993+
return [
994+
axis.name.lower() for axis in self.metadata.multiscales[0].axes
995+
]
950996

951997
def get_axis_index(self, axis_name: str) -> int:
952998
"""
@@ -1021,7 +1067,9 @@ def set_transform(
10211067
if image == "*":
10221068
self.metadata.multiscales[0].coordinate_transformations = transform
10231069
elif image in self:
1024-
for i, dataset_meta in enumerate(self.metadata.multiscales[0].datasets):
1070+
for i, dataset_meta in enumerate(
1071+
self.metadata.multiscales[0].datasets
1072+
):
10251073
if dataset_meta.path == image:
10261074
self.metadata.multiscales[0].datasets[i] = DatasetMeta(
10271075
path=image, coordinate_transformations=transform
@@ -1050,7 +1098,9 @@ def set_scale(
10501098
Value of the new scale.
10511099
"""
10521100
if len(self.metadata.multiscales) > 1:
1053-
raise NotImplementedError("Cannot set scale for multi-resolution images.")
1101+
raise NotImplementedError(
1102+
"Cannot set scale for multi-resolution images."
1103+
)
10541104

10551105
if new_scale <= 0:
10561106
raise ValueError("New scale must be positive.")
@@ -1065,7 +1115,9 @@ def set_scale(
10651115
self.zattrs["iohub"] = iohub_dict
10661116

10671117
# Update scale while preserving existing transforms
1068-
transforms = self.metadata.multiscales[0].datasets[0].coordinate_transformations
1118+
transforms = (
1119+
self.metadata.multiscales[0].datasets[0].coordinate_transformations
1120+
)
10691121
# Replace default identity transform with scale
10701122
if len(transforms) == 1 and transforms[0].type == "identity":
10711123
transforms = [TransformationMeta(type="scale", scale=[1] * 5)]
@@ -1191,7 +1243,9 @@ def _parse_meta(self):
11911243

11921244
def dump_meta(self):
11931245
"""Dumps metadata JSON to the `.zattrs` file."""
1194-
self.zattrs.update({"well": self.metadata.model_dump(**TO_DICT_SETTINGS)})
1246+
self.zattrs.update(
1247+
{"well": self.metadata.model_dump(**TO_DICT_SETTINGS)}
1248+
)
11951249

11961250
def __getitem__(self, key: str):
11971251
"""Get a position member of the well.
@@ -1372,7 +1426,8 @@ def from_positions(
13721426
for name, src_pos in positions.items():
13731427
if not isinstance(src_pos, Position):
13741428
raise TypeError(
1375-
f"Expected item type {type(Position)}, " f"got {type(src_pos)}"
1429+
f"Expected item type {type(Position)}, "
1430+
f"got {type(src_pos)}"
13761431
)
13771432
name = normalize_storage_path(name)
13781433
if name in plate.zgroup:
@@ -1455,7 +1510,9 @@ def dump_meta(self, field_count: bool = False):
14551510
"""
14561511
if field_count:
14571512
self.metadata.field_count = len(list(self.positions()))
1458-
self.zattrs.update({"plate": self.metadata.model_dump(**TO_DICT_SETTINGS)})
1513+
self.zattrs.update(
1514+
{"plate": self.metadata.model_dump(**TO_DICT_SETTINGS)}
1515+
)
14591516

14601517
def _auto_idx(
14611518
self,
@@ -1543,7 +1600,9 @@ def create_well(
15431600
self.metadata.wells.append(well_index_meta)
15441601
# create new row if needed
15451602
if row_name not in self:
1546-
row_grp = self.zgroup.create_group(row_meta.name, overwrite=self._overwrite)
1603+
row_grp = self.zgroup.create_group(
1604+
row_meta.name, overwrite=self._overwrite
1605+
)
15471606
if row_meta not in self.metadata.rows:
15481607
self.metadata.rows.append(row_meta)
15491608
else:
@@ -1666,9 +1725,9 @@ def rename_well(
16661725
self.zgroup.move(old, new)
16671726

16681727
# update well metadata
1669-
old_well_index = [well_name.path for well_name in self.metadata.wells].index(
1670-
old
1671-
)
1728+
old_well_index = [
1729+
well_name.path for well_name in self.metadata.wells
1730+
].index(old)
16721731
self.metadata.wells[old_well_index].path = new
16731732
new_well_names = [well.path for well in self.metadata.wells]
16741733

0 commit comments

Comments (0)