Skip to content

Commit

Permalink
More docstrings
Browse files Browse the repository at this point in the history
Signed-off-by: zethson <lukas.heumos@posteo.net>
  • Loading branch information
Zethson committed Nov 14, 2024
1 parent 20f6975 commit bb787ca
Show file tree
Hide file tree
Showing 10 changed files with 772 additions and 1,140 deletions.
12 changes: 6 additions & 6 deletions src/scportrait/io/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ def __init__(
self.debug = debug
self.overwrite = overwrite

self.input_image_status = None
self.segmentation_status = None
self.centers_status = None
self.input_image_status: bool | None = None
self.segmentation_status: bool | None = None
self.centers_status: bool | None = None

def _check_memory(self, item) -> bool:
"""Check if item can fit in available memory.
Expand All @@ -77,7 +77,7 @@ def _create_temp_dir(self) -> None:
Creates directory in cache location specified in config or current working directory.
"""
if self.cache is not None:
path = os.path.join(self.config["cache"], f"{self.__class__.__name__}_")
path = os.path.join(self.directory["cache"], f"{self.__class__.__name__}_") # type: ignore
else:
path = os.path.join(os.getcwd(), f"{self.__class__.__name__}_")

Expand Down Expand Up @@ -338,10 +338,10 @@ def write_centers_to_spatialdata(self, coordinate_system: str = "global") -> Non
_ids = cPickle.load(input_file)
centroids = self._make_centers_object(centers, _ids, transform, coordinate_system=coordinate_system)
else:
if self.segementation_status:
if self.segmentation_status:
self.log("No centers found in project. Recalculating based on the provided segmentation mask.")

if self.check_memory(mask):
if self._check_memory(mask):
self.log("Calculating centers using numba This should be quick.")
centers, _, _ids = numba_mask_centroid(mask.values)
centroids = self._make_centers_object(centers, _ids, transform, coordinate_system=coordinate_system)
Expand Down
6 changes: 3 additions & 3 deletions src/scportrait/io/daskmmap.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,8 @@ def calculate_chunk_sizes_chunks(
target_size_bytes = target_size_gb * 1024**3
total_elements_per_chunk = target_size_bytes // element_size

HDF5_chunk_size = list(HDF5_chunk_size)
chunk_sizes = HDF5_chunk_size.copy()
HDF5_chunk_size = list(HDF5_chunk_size) # type: ignore
chunk_sizes = HDF5_chunk_size.copy() # type: ignore

while np.prod(chunk_sizes) < total_elements_per_chunk:
for i in range(len(chunk_sizes)):
Expand Down Expand Up @@ -209,4 +209,4 @@ def load_hdf5_chunk(file_path: str, container_name: str, slices: tuple[slice, ..
The sliced chunk from the HDF5 dataset
"""
with h5py.File(file_path, "r") as f:
data = f[container_name][slices]
f[container_name][slices]
5 changes: 3 additions & 2 deletions src/scportrait/pipeline/_utils/helper.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from typing import TypeVar

T = TypeVar("T")


def flatten(nested_list: list[list[T]]) -> list[T]:
    """Flatten a list of lists into a single list.

    Args:
        nested_list: A list whose elements are themselves lists.

    Returns:
        A single flat list containing the elements of every sublist,
        preserving their order.

    Examples:
        >>> nested_list = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        >>> flatten(nested_list)
        [1, 2, 3, 4, 5, 6, 7, 8, 9]
    """
    return [item for sublist in nested_list for item in sublist]
Loading

0 comments on commit bb787ca

Please sign in to comment.