Merge pull request #13 from MannLabs/package-improvments
Update repo and testcases
sophiamaedler authored Aug 27, 2024
2 parents 2cb65b3 + 289b8ae commit 6a987ba
Showing 7 changed files with 1,395 additions and 40 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/python-package.yml
@@ -7,7 +7,7 @@ on:
push:
branches: [ main, release ]
pull_request:
branches: [ main, release ]
workflow_dispatch:

jobs:
build:
@@ -35,4 +35,4 @@ jobs:
python -m pip install -e .
- name: Test with pytest
run: |
pytest
pytest
1,365 changes: 1,357 additions & 8 deletions docs_source/pages/notebooks/Image_Segmentation/Image_Segmentation_2.ipynb

Large diffs are not rendered by default.

23 changes: 7 additions & 16 deletions docs_source/pages/notebooks/generate_cutting_mask_svg.ipynb

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions src/lmd/conftest.py
@@ -0,0 +1,5 @@
import matplotlib
import matplotlib.pyplot

matplotlib.use("Agg")
matplotlib.pyplot.ioff()
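
For context, a minimal sketch (not part of the commit) of what this conftest buys the test suite: with the Agg backend selected and interactive mode off, figure-producing tests run headless, e.g. in CI. The test name and plotted data below are hypothetical.

import matplotlib
matplotlib.use("Agg")              # non-interactive backend: no display server needed
import matplotlib.pyplot as plt

plt.ioff()                         # disable interactive mode so nothing tries to open a window

def test_plot_runs_headless(tmp_path):
    # hypothetical pytest test: drawing and saving a figure works without a GUI
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    fig.savefig(tmp_path / "line.png")
    plt.close(fig)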
23 changes: 16 additions & 7 deletions src/lmd/lib.py
@@ -11,6 +11,8 @@
from svgelements import SVG
from lmd.segmentation import get_coordinate_form, tsp_greedy_solve, tsp_hilbert_solve, calc_len, _create_coord_index, _filter_coord_index
from tqdm import tqdm
# import warnings
import warnings

from skimage.morphology import dilation as binary_dilation
from skimage.morphology import binary_erosion, disk
@@ -606,6 +608,7 @@ def __init__(self, config = {}, verbose = False, processes = 1):
self.register_parameter('processes', 10)
self.register_parameter('join_intersecting', True)
self.register_parameter('orientation_transform', np.eye(2))
self.register_parameter('threads', 1)

self.coords_lookup = None
self.processes = processes
@@ -641,7 +644,6 @@ def __call__(self, input_segmentation, cell_sets, calibration_points, coords_loo
self.coords_lookup = coords_lookup

#try multithreading

if self.processes > 1:
self.log("Processing cell sets in parallel")
args = []
@@ -659,6 +661,8 @@ def __call__(self, input_segmentation, cell_sets, calibration_points, coords_loo
n_threads = self.processes
)
else:
print("Processing cell sets in serial")
print(cell_set)
collections = []
for i, cell_set in enumerate(cell_sets):
collections.append(self.generate_cutting_data(i, cell_set))
@@ -667,6 +671,10 @@ def __call__(self, input_segmentation, cell_sets, calibration_points, coords_loo

def generate_cutting_data(self, i, cell_set):

if 0 in cell_set["classes_loaded"]:
cell_set["classes_loaded"] = cell_set["classes_loaded"][cell_set["classes_loaded"] != 0]
warnings.warn("Class 0 is not a valid class and was removed from the cell set")

self.log("Convert label format into coordinate format")
center, length, coords = get_coordinate_form(self.input_segmentation, cell_set["classes_loaded"], self.coords_lookup)

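As an aside, a minimal sketch of the background-filtering idiom added above, using a made-up class array:

import warnings
import numpy as np

classes_loaded = np.array([0, 1, 2, 5])                   # hypothetical classes, 0 = background
if 0 in classes_loaded:
    classes_loaded = classes_loaded[classes_loaded != 0]  # boolean mask drops the background label
    warnings.warn("Class 0 is not a valid class and was removed from the cell set")
# classes_loaded is now array([1, 2, 5])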
@@ -698,6 +706,7 @@ def generate_cutting_data(self, i, cell_set):
self.log("Check failed, returned coordinates contain empty elements. Please check if all classes specified are present in your segmentation")

if self.config['join_intersecting']:
print("Merging intersecting shapes")
center, length, coords = self.merge_dilated_shapes(center, length, coords,
dilation = self.config['shape_dilation'],
erosion = self.config['shape_erosion'])
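
The join_intersecting branch relies on a dilate-merge-erode pattern; below is a rough standalone sketch of that idea with skimage (not the implementation of merge_dilated_shapes itself; the mask and footprint sizes are made up).

import numpy as np
from skimage.measure import label
from skimage.morphology import dilation, erosion, disk

mask = np.zeros((64, 64), dtype=bool)     # hypothetical segmentation with two nearby shapes
mask[10:20, 10:20] = True
mask[22:30, 22:30] = True

dilated = dilation(mask, disk(3))         # grow shapes so intersecting/nearby ones fuse
merged_labels = label(dilated)            # fused shapes now share a single connected component
restored = erosion(dilated, disk(3))      # shrink back towards the original outlines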
@@ -723,14 +732,14 @@ def generate_cutting_data(self, i, cell_set):

self.log("Calculating polygons")
if self.config["threads"] == 1:
shapes = []
polygons = []
for shape in tqdm(shapes, desc = "calculating polygons"):
shapes.append(create_poly(shape,
polygons.append(create_poly(shape,
smoothing_filter_size = self.config['convolution_smoothing'],
poly_compression_factor = self.config['poly_compression_factor']))
else:
with mp.get_context(self.context).Pool(processes=self.config['threads']) as pool:
shapes = list(tqdm(pool.imap(partial(create_poly,
polygons = list(tqdm(pool.imap(partial(create_poly,
smoothing_filter_size = self.config['convolution_smoothing'],
poly_compression_factor = self.config['poly_compression_factor']
),
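
The threaded branch follows the usual pool.imap + functools.partial recipe; here is a self-contained sketch of that pattern with a stand-in worker (create_poly_stub and the input shapes are hypothetical, and the spawn context simply mirrors the configurable context used in the class).

import multiprocessing as mp
from functools import partial
from tqdm import tqdm

def create_poly_stub(shape, smoothing_filter_size=1, poly_compression_factor=1):
    # stand-in for lmd's create_poly: just returns the shape unchanged
    return shape

shapes = [[(0, 0), (0, 1), (1, 1)], [(2, 2), (2, 3), (3, 3)]]
worker = partial(create_poly_stub, smoothing_filter_size=3, poly_compression_factor=2)

if __name__ == "__main__":
    with mp.get_context("spawn").Pool(processes=2) as pool:
        polygons = list(tqdm(pool.imap(worker, shapes), total=len(shapes), desc="calculating polygons"))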
@@ -780,7 +789,7 @@ def generate_cutting_data(self, i, cell_set):
self.log(f"Optimization factor: {optimization_factor:,.1f}x")

# order list of shapes by the optimized index array
shapes = [x for _, x in sorted(zip(optimized_idx, shapes))]
polygons = [x for _, x in sorted(zip(optimized_idx, polygons))]

# Plot coordinates if in debug mode
if self.verbose:
@@ -795,7 +804,7 @@ def generate_cutting_data(self, i, cell_set):

ax.scatter(center[:,1],center[:,0], s=1)

for shape in shapes:
for shape in polygons:
ax.plot(shape[:,1],shape[:,0], color="red",linewidth=1)


@@ -810,7 +819,7 @@ def generate_cutting_data(self, i, cell_set):
ds = Collection(calibration_points = self.calibration_points)
ds.orientation_transform = self.config['orientation_transform']

for shape in shapes:
for shape in polygons:
# Check if well key is set in cell set definition
if "well" in cell_set:
ds.new_shape(shape, well=cell_set["well"])
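A short usage sketch of the well-aware export above, assuming Collection is importable from lmd.lib as the diff suggests (calibration points, polygon coordinates, and the well id are made up):

import numpy as np
from lmd.lib import Collection

calibration = np.array([[0, 0], [0, 100], [100, 50]])
ds = Collection(calibration_points=calibration)

polygon = np.array([[10, 10], [10, 20], [20, 20], [10, 10]])   # hypothetical cutting polygon
cell_set = {"well": "A1"}                                      # hypothetical cell set definition

if "well" in cell_set:
    ds.new_shape(polygon, well=cell_set["well"])               # shape is assigned to a well
else:
    ds.new_shape(polygon)                                      # no well key: plain shape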
10 changes: 5 additions & 5 deletions src/lmd/lmd_test.py
@@ -82,11 +82,11 @@ def test_text():
my_first_collection.join(identifier_3)

def test_segmentation_loader():
_dir = pathlib.Path(__file__).parent.resolve().absolute()
_dir = str(_dir).replace("src/lmd/", "docs_source/pages/notebooks")
im = Image.open(os.path.join(_dir, 'Image_Segmentation', 'segmentation_cytosol.tiff'))

package_base_path = pathlib.Path(__file__).parent.parent.parent.resolve().absolute()
test_segmentation_path = os.path.join(package_base_path, 'docs_source/pages/notebooks/Image_Segmentation/segmentation_cytosol.tiff')

im = Image.open(test_segmentation_path)
segmentation = np.array(im).astype(np.uint32)

all_classes = np.unique(segmentation)
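The new path logic anchors the test asset at the repository root instead of string-replacing the module path; an equivalent spelling with pathlib.parents (a sketch, not part of the commit) would be:

import pathlib

here = pathlib.Path(__file__).resolve()
package_base_path = here.parents[2]        # climbs lmd_test.py -> src/lmd -> src -> repository root
test_segmentation_path = (package_base_path / "docs_source" / "pages" / "notebooks"
                          / "Image_Segmentation" / "segmentation_cytosol.tiff")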
5 changes: 3 additions & 2 deletions src/lmd/segmentation.py
@@ -44,11 +44,12 @@ def _create_coord_index(mask, background=0):
return index_list

@njit
def _filter_coord_index(index_list, classes):
def _filter_coord_index(index_list, classes, background=0):

filtered_index_list = []
for idx, class_id in enumerate(classes):
filtered_index_list.append(index_list[class_id])
if class_id != background:
filtered_index_list.append(index_list[class_id])
return filtered_index_list

def get_coordinate_form(inarr, classes, coords_lookup, debug=False):
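A plain-Python sketch of the new background handling (the real _filter_coord_index is numba-compiled via @njit; the index data here is made up):

def filter_coord_index(index_list, classes, background=0):
    # keep coordinate blocks for every requested class except the background label
    filtered_index_list = []
    for class_id in classes:
        if class_id != background:
            filtered_index_list.append(index_list[class_id])
    return filtered_index_list

index_list = {0: [(0, 0)], 1: [(5, 5), (5, 6)], 2: [(9, 9)]}   # hypothetical class -> pixel coords
print(filter_coord_index(index_list, classes=[0, 1, 2]))       # background class 0 is skipped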
