Skip to content

Commit

Permalink
Add components test
Browse files Browse the repository at this point in the history
  • Loading branch information
sheridana committed Dec 27, 2022
1 parent 73f6c56 commit 64b52a0
Show file tree
Hide file tree
Showing 4 changed files with 186 additions and 23 deletions.
7 changes: 4 additions & 3 deletions lsd/train/local_shape_descriptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,10 @@ def get_local_shape_descriptors(
components (``string`` of ``int``, optional):
The components of the local shape descriptors to compute and return.
"012" returns the first three components. "0129" returns the first three and
last components if 3D, "0125" if 2D. Components must be in ascending order.
Defaults to all components.
"012" returns the first three components. "0129" returns the first
three and last components if 3D, "0125" if 2D. Components must be in
ascending order. Defaults to all components. Valid component
combinations can be seen in tests folder (components test).
Component string lookup, where example component : "3D axes", "2D axes"
Expand Down
154 changes: 154 additions & 0 deletions tests/test_components.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
import logging
import mahotas
import numpy as np
from lsd.train import LsdExtractor, local_shape_descriptor
from scipy.ndimage import gaussian_filter, maximum_filter

logging.basicConfig(level=logging.INFO)


def create_random_segmentation(seed, size):
    """Create a reproducible random label array of the given shape.

    Uniform noise is seeded with ``seed``, smoothed with a Gaussian, and
    its local maxima (within a 10-voxel window) are labeled and used as
    seeds for a watershed on the inverted noise.

    Args:
        seed (int): seed for numpy's RNG, making the segmentation
            reproducible across calls.
        size (tuple of int): shape of the segmentation to generate.

    Returns:
        ``np.uint64`` array of shape ``size`` with one label per
        watershed basin.
    """

    np.random.seed(seed)
    peaks = np.random.random(size).astype(np.float32)
    peaks = gaussian_filter(peaks, sigma=5.0)

    # a voxel is a seed candidate if it equals the local maximum of its
    # 10-voxel neighborhood
    max_filtered = maximum_filter(peaks, 10)
    maxima = max_filtered == peaks
    seeds, n = mahotas.label(maxima)

    # use the logging module (configured at file top via basicConfig)
    # instead of print, so the message respects the configured log level
    logging.info("Creating segmentation with %d segments", n)

    return mahotas.cwatershed(1.0 - peaks, seeds).astype(np.uint64)


def create_lsds(size, combs):
    """Compute LSDs for each component combination and check channel counts.

    For every component string in ``combs``, descriptors are computed twice:
    once through an ``LsdExtractor`` instance and once through the
    module-level ``get_local_shape_descriptors`` function. In both cases the
    result must have exactly one channel per requested component.

    Args:
        size (tuple of int): shape of the synthetic segmentation.
        combs (list of str): component strings to test (e.g. ``"0129"``).
    """

    segmentation = create_random_segmentation(42, size)

    dims = len(size)
    sigma = dims * (10,)
    voxel_size = dims * (1,)

    extractor = LsdExtractor(sigma=sigma)

    for components in combs:

        # descriptors via the extractor object
        via_extractor = extractor.get_descriptors(
            segmentation, components=components
        )
        assert via_extractor.shape[0] == len(components)

        # descriptors via the functional interface
        via_function = local_shape_descriptor.get_local_shape_descriptors(
            segmentation=segmentation,
            components=components,
            sigma=sigma,
            voxel_size=voxel_size,
        )
        assert via_function.shape[0] == len(components)


def test_components_2d():
    """Exercise every supported 2D component combination.

    2D base components: mean offset ``"01"``, covariance ``"23"``,
    pearsons ``"4"``, size ``"5"``. Each base component, every pairwise
    and three-way combination (component digits must stay in ascending
    order), and the full set ``"012345"`` are tested.
    """

    # base components on their own
    singles = ["01", "23", "4", "5"]

    # all pairwise combinations of the base components
    pairs = ["0123", "014", "015", "234", "235", "45"]

    # all three-way combinations
    triples = ["01234", "01235", "0145", "2345"]

    # every component at once
    everything = ["012345"]

    create_lsds((100, 100), singles + pairs + triples + everything)


def test_components_3d():
    """Exercise every supported 3D component combination.

    3D base components: mean offset ``"012"``, covariance ``"345"``,
    pearsons ``"678"``, size ``"9"``. Each base component, every pairwise
    and three-way combination (component digits must stay in ascending
    order), and the full set ``"0123456789"`` are tested.
    """

    # base components on their own
    singles = ["012", "345", "678", "9"]

    # all pairwise combinations of the base components
    pairs = ["012345", "012678", "0129", "345678", "3459", "6789"]

    # all three-way combinations
    triples = ["012345678", "0123459", "0126789", "3456789"]

    # every component at once
    everything = ["0123456789"]

    create_lsds((1, 50, 50), singles + pairs + triples + everything)
30 changes: 18 additions & 12 deletions tests/test_strided.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,29 +2,34 @@
from numpy.lib.stride_tricks import as_strided
from numpy.testing import *


def _2d(array, f):

shape = array.shape
stride = array.strides

view = as_strided(
array,
(shape[0], shape[1], f, shape[2], f),
(stride[0], stride[1], 0, stride[2], 0))
array,
(shape[0], shape[1], f, shape[2], f),
(stride[0], stride[1], 0, stride[2], 0),
)

return view.reshape(shape[0], shape[1] * f, shape[2] * f)

return view.reshape(shape[0], shape[1]*f, shape[2]*f)

def _3d(array, f):

shape = array.shape
stride = array.strides

view = as_strided(
array,
(shape[0], shape[1], f, shape[2], f, shape[3], f),
(stride[0], stride[1], 0, stride[2], 0, stride[3], 0))
array,
(shape[0], shape[1], f, shape[2], f, shape[3], f),
(stride[0], stride[1], 0, stride[2], 0, stride[3], 0),
)

return view.reshape(shape[0], shape[1] * f, shape[2] * f, shape[3] * f)

return view.reshape(shape[0], shape[1]*f, shape[2]*f, shape[3]*f)

def _2d_3d(array, f):

Expand All @@ -38,17 +43,18 @@ def _2d_3d(array, f):
sh = (shape[0], shape[1], f, shape[2], f)
st = (stride[0], stride[1], 0, stride[2], 0)

view = as_strided(array,sh,st)
view = as_strided(array, sh, st)

l = [shape[0]]
[l.append(shape[i+1]*f) for i,j in enumerate(shape[1:])]
[l.append(shape[i + 1] * f) for i, j in enumerate(shape[1:])]

return view.reshape(l)


def test_strided():

a_2d = np.array([[[1,1,1],[1,1,1],[1,1,1]]])
a_3d = np.array([[[[1,1,1],[1,1,1],[1,1,1]]]])
a_2d = np.array([[[1, 1, 1], [1, 1, 1], [1, 1, 1]]])
a_3d = np.array([[[[1, 1, 1], [1, 1, 1], [1, 1, 1]]]])

for f in range(10):

Expand Down
18 changes: 10 additions & 8 deletions tests/test_synthetic.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,17 +10,19 @@

size = (1, 100, 100)


def create_random_segmentation(seed):

np.random.seed(seed)
peaks = np.random.random(size).astype(np.float32)
peaks = gaussian_filter(peaks, sigma=5.0)
max_filtered = maximum_filter(peaks, 10)
maxima = max_filtered==peaks
maxima = max_filtered == peaks
seeds, n = mahotas.label(maxima)
print("Creating segmentation with %d segments"%n)
print("Creating segmentation with %d segments" % n)
return mahotas.cwatershed(1.0 - peaks, seeds).astype(np.uint64)


def test_synthetic():

# factor of pixel-wise uniformly sampled noise (in [-0.5, 0.5]) to add to
Expand All @@ -30,20 +32,20 @@ def test_synthetic():
gt = create_random_segmentation(42)
fragments = create_random_segmentation(23)
# intersect gt and fragments to get an oversegmentation
fragments = gt + (fragments + 1)*gt.max()
fragments = gt + (fragments + 1) * gt.max()

lsd_extractor = LsdExtractor(sigma=(10.0, 10.0, 10.0))
predicted_lsds = lsd_extractor.get_descriptors(gt)

if noise_factor > 0:
noise = -0.5 + np.random.random(predicted_lsds.shape)
predicted_lsds += noise*noise_factor
predicted_lsds += noise * noise_factor
predicted_lsds = predicted_lsds.clip(0, 1)

name = 'test_synthetic_noise=%d.hdf'%noise_factor
name = "test_synthetic_noise=%d.hdf" % noise_factor

with h5py.File(name, 'w') as f:
f['volumes/gt'] = gt
f['volumes/predicted_lsds'] = predicted_lsds[0:3]
with h5py.File(name, "w") as f:
f["volumes/gt"] = gt
f["volumes/predicted_lsds"] = predicted_lsds[0:3]

os.remove(name)

0 comments on commit 64b52a0

Please sign in to comment.