From a779d4e4f3b2d91d934810ca22ad8eb5d3944d3e Mon Sep 17 00:00:00 2001 From: Huan Luo Date: Fri, 21 Sep 2018 13:40:37 -0400 Subject: [PATCH] Add freesurfer information Other function may not work well since the input in fiber.pyx has been changed --- bin/wm_cluster_atlas.py | 27 +++++-- whitematteranalysis/cluster.py | 115 +++++++++++++++++------------ whitematteranalysis/fibers.pyx | 65 +++++++++++++++- whitematteranalysis/similarity.pyx | 24 ++++-- 4 files changed, 168 insertions(+), 63 deletions(-) mode change 100644 => 100755 whitematteranalysis/cluster.py mode change 100644 => 100755 whitematteranalysis/fibers.pyx mode change 100644 => 100755 whitematteranalysis/similarity.pyx diff --git a/bin/wm_cluster_atlas.py b/bin/wm_cluster_atlas.py index 3546e0db..dfc584b1 100755 --- a/bin/wm_cluster_atlas.py +++ b/bin/wm_cluster_atlas.py @@ -5,6 +5,7 @@ import multiprocessing import vtk import time +import nibabel try: import whitematteranalysis as wma @@ -27,6 +28,9 @@ parser.add_argument( 'inputDirectory', help='A directory of (already registered) whole-brain tractography as vtkPolyData (.vtk or .vtp).') +parser.add_argument( + 'freesurfer', + help='freesurfer file in nifti (default for freesurfer result), e.g. /Users/Desktop/xxx.nii.gz') parser.add_argument( 'outputDirectory', help='The output directory will be created if it does not exist.') @@ -104,6 +108,10 @@ if not os.path.isdir(args.inputDirectory): print " Error: Input directory", args.inputDirectory, "does not exist or is not a directory." exit() + +if not os.path.exists(args.freesurfer): + print "Freesurfer file", args.freesurfer, "does not exist." 
+ exit() outdir = args.outputDirectory if not os.path.exists(outdir): @@ -368,6 +376,8 @@ input_data = appender.GetOutput() del input_pds +input_freesurfer = nibabel.load(args.freesurfer) + # figure out which subject each fiber was from in the input to the clustering subject_fiber_list = list() for sidx in range(number_of_subjects): @@ -414,7 +424,7 @@ # Run clustering on the polydata print ' Starting clustering.' output_polydata_s, cluster_numbers_s, color, embed, distortion, atlas, reject_idx = \ - wma.cluster.spectral(input_data, number_of_clusters=number_of_clusters, \ + wma.cluster.spectral(input_data, input_freesurfer, number_of_clusters=number_of_clusters, \ number_of_jobs=number_of_jobs, use_nystrom=use_nystrom, \ nystrom_mask = nystrom_mask, \ number_of_eigenvectors=number_of_eigenvectors, \ @@ -440,7 +450,8 @@ print " Output directory", outdir1, "does not exist, creating it." os.makedirs(outdir1) print ' Saving output files in directory:', outdir1 - wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, number_of_subjects, outdir1, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=render) + wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, input_freesurfer, number_of_subjects,\ + outdir1, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=render) # Remove outliers from this iteration and save atlas again print "Starting local cluster outlier removal" @@ -498,7 +509,7 @@ farray = wma.fibers.FiberArray() farray.hemispheres = True farray.hemisphere_percent_threshold = 0.90 - farray.convert_from_polydata(pd_c, points_per_fiber=50) + farray.convert_from_polydata(pd_c, input_freesurfer, points_per_fiber=50) fiber_hemisphere[fiber_indices] = farray.fiber_hemisphere cluster_left_hem.append(farray.number_left_hem) 
cluster_right_hem.append(farray.number_right_hem)
 
@@ -506,11 +517,11 @@
 
         # Compute distances and fiber probabilities
         if distance_method == 'StrictSimilarity':
-            cluster_distances = wma.cluster._pairwise_distance_matrix(pd_c, 0.0, number_of_jobs=1, bilateral=bilateral, distance_method=distance_method, sigmasq = cluster_local_sigma * cluster_local_sigma)
+            cluster_distances, cluster_freeinfo = wma.cluster._pairwise_distance_matrix(pd_c, input_freesurfer, 0.0, number_of_jobs=1, bilateral=bilateral, distance_method=distance_method, sigmasq = cluster_local_sigma * cluster_local_sigma)
             cluster_similarity = cluster_distances
         else:
-            cluster_distances = wma.cluster._pairwise_distance_matrix(pd_c, 0.0, number_of_jobs=1, bilateral=bilateral, distance_method=distance_method)
-            cluster_similarity = wma.similarity.distance_to_similarity(cluster_distances, cluster_local_sigma * cluster_local_sigma)
+            cluster_distances, cluster_freeinfo = wma.cluster._pairwise_distance_matrix(pd_c, input_freesurfer, 0.0, number_of_jobs=1, bilateral=bilateral, distance_method=distance_method)
+            cluster_similarity = wma.similarity.distance_to_similarity(cluster_distances, cluster_freeinfo, cluster_local_sigma * cluster_local_sigma)
 
         #p(f1) = sum over all f2 of p(f1|f2) * p(f2)
         # by using sample we estimate expected value of the above
@@ -651,7 +662,7 @@
         # NOTE: compute and save mean fibers per cluster (add these into the atlas as another polydata)
 
     # Save the current atlas
-    wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, number_of_subjects, outdir2, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=render)
+    wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, input_freesurfer, number_of_subjects, outdir2, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=render)
 
     # now make
the outlier clusters have positive numbers with -cluster_numbers_s so they can be saved also outdir3 = os.path.join(outdir2, 'outlier_tracts') @@ -661,7 +672,7 @@ print ' Saving outlier fiber files in directory:', outdir3 mask = cluster_numbers_s < 0 cluster_numbers_outliers = -numpy.multiply(cluster_numbers_s, mask) - 1 - wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, number_of_subjects, outdir3, cluster_numbers_outliers, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=False) + wma.cluster.output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, input_freesurfer, number_of_subjects, outdir3, cluster_numbers_outliers, color, embed, number_of_fibers_to_display, testing=testing, verbose=False, render_images=False) test = subject_fiber_list[numpy.nonzero(mask)] diff --git a/whitematteranalysis/cluster.py b/whitematteranalysis/cluster.py old mode 100644 new mode 100755 index 39fe7bee..9fcd1efe --- a/whitematteranalysis/cluster.py +++ b/whitematteranalysis/cluster.py @@ -176,7 +176,7 @@ def nearPSD(A,epsilon=0): out = B*B.T return(numpy.asarray(out)) -def spectral(input_polydata, number_of_clusters=200, +def spectral(input_polydata, input_freesurfer, number_of_clusters=200, number_of_eigenvectors=20, sigma=60, threshold=0.0, number_of_jobs=3, use_nystrom=False, nystrom_mask=None, landmarks=None, distance_method='Mean', normalized_cuts=True, @@ -252,11 +252,12 @@ def spectral(input_polydata, number_of_clusters=200, # Calculate fiber similarities A = \ - _pairwise_similarity_matrix(polydata_m, threshold, + _pairwise_similarity_matrix(polydata_m, input_freesurfer, threshold, sigma, number_of_jobs, landmarks_m, distance_method, bilateral) B = \ - _rectangular_similarity_matrix(polydata_n, polydata_m, threshold, + _rectangular_similarity_matrix(polydata_n, polydata_m, input_freesurfer, threshold, sigma, number_of_jobs, 
landmarks_n, landmarks_m, distance_method, bilateral) + # sanity check print " Range of values in A:", numpy.min(A), numpy.max(A) @@ -265,7 +266,7 @@ def spectral(input_polydata, number_of_clusters=200, else: # Calculate all fiber similarities A = \ - _pairwise_similarity_matrix(input_polydata, threshold, + _pairwise_similarity_matrix(input_polydata, input_freesurfer, threshold, sigma, number_of_jobs, landmarks, distance_method, bilateral) atlas.nystrom_polydata = input_polydata @@ -301,9 +302,7 @@ def spectral(input_polydata, number_of_clusters=200, A = numpy.delete(A,reject_A,0) A = numpy.delete(A,reject_A,1) - #print A.shape, B.shape B = numpy.delete(B,reject_A,0) - #print A.shape, B.shape, reorder_embedding.shape # Ensure that A is positive definite. if pos_def_approx: @@ -315,9 +314,9 @@ def spectral(input_polydata, number_of_clusters=200, testval = numpy.max(A-A2) if not testval == 0.0: print " A matrix differs by PSD matrix by maximum of:", testval - if testval > 0.25: - print " ERROR: A matrix changed by more than 0.25." - raise AssertionError +# if testval > 0.25: +# print " ERROR: A matrix changed by more than 0.25." +# raise AssertionError A = A2 # 2) Do Normalized Cuts transform of similarity matrix. @@ -493,7 +492,7 @@ def spectral(input_polydata, number_of_clusters=200, cluster_metric = None if centroid_finder == 'K-means': print ' K-means clustering in embedding space.' 
- centroids, cluster_metric = scipy.cluster.vq.kmeans2(embed, number_of_clusters, minit='points') + centroids, cluster_metric = scipy.cluster.vq.kmeans2(embed, number_of_clusters, iter =50, minit='points') # sort centroids by first eigenvector order # centroid_order = numpy.argsort(centroids[:,0]) # sort centroids according to colormap and save them in this order in atlas @@ -635,7 +634,7 @@ def spectral_atlas_label(input_polydata, atlas, number_of_jobs=2): return output_polydata, cluster_idx, color, embed -def _rectangular_distance_matrix(input_polydata_n, input_polydata_m, threshold, +def _rectangular_distance_matrix(input_polydata_n, input_polydata_m, input_freesurfer, threshold, number_of_jobs=3, landmarks_n=None, landmarks_m=None, distance_method='Hausdorff', bilateral=False): @@ -657,31 +656,44 @@ def _rectangular_distance_matrix(input_polydata_n, input_polydata_m, threshold, else: fiber_array_n = fibers.FiberArray() - fiber_array_n.convert_from_polydata(input_polydata_n, points_per_fiber=15) + fiber_array_n.convert_from_polydata(input_polydata_n, input_freesurfer, points_per_fiber=15) fiber_array_m = fibers.FiberArray() - fiber_array_m.convert_from_polydata(input_polydata_m, points_per_fiber=15) + fiber_array_m.convert_from_polydata(input_polydata_m, input_freesurfer, points_per_fiber=15) if landmarks_n is None: landmarks_n = numpy.zeros((fiber_array_n.number_of_fibers,3)) # pairwise distance matrix - all_fibers_n = range(0, fiber_array_n.number_of_fibers) - - distances = Parallel(n_jobs=number_of_jobs, - verbose=0)( - delayed(similarity.fiber_distance)( +# all_fibers_n = range(0, fiber_array_n.number_of_fibers) +# +# distances = Parallel(n_jobs=number_of_jobs, +# verbose=0)( +# delayed(similarity.fiber_distance)( +# fiber_array_n.get_fiber(lidx), +# fiber_array_m, +# threshold, distance_method=distance_method, +# fiber_landmarks=landmarks_n[lidx,:], +# landmarks=landmarks_m, bilateral=bilateral) +# for lidx in all_fibers_n) +# +# distances = 
numpy.array(distances).T + distances = numpy.zeros([fiber_array_n.number_of_fibers,fiber_array_m.number_of_fibers]) + freeinfo = numpy.zeros([fiber_array_n.number_of_fibers,fiber_array_m.number_of_fibers]) + for lidx in xrange(0,fiber_array_n.number_of_fibers): + distances[lidx,:], freeinfo[lidx,:] = similarity.fiber_distance( fiber_array_n.get_fiber(lidx), fiber_array_m, threshold, distance_method=distance_method, fiber_landmarks=landmarks_n[lidx,:], landmarks=landmarks_m, bilateral=bilateral) - for lidx in all_fibers_n) distances = numpy.array(distances).T + freeinfo = numpy.array(freeinfo).T +# numpy.save('freeinfo.npy',freeinfo) - return distances + return distances, freeinfo -def _rectangular_similarity_matrix(input_polydata_n, input_polydata_m, threshold, sigma, +def _rectangular_similarity_matrix(input_polydata_n, input_polydata_m, input_freesurfer, threshold, sigma, number_of_jobs=3, landmarks_n=None, landmarks_m=None, distance_method='Hausdorff', bilateral=False): @@ -696,7 +708,7 @@ def _rectangular_similarity_matrix(input_polydata_n, input_polydata_m, threshold """ - distances = _rectangular_distance_matrix(input_polydata_n, input_polydata_m, threshold, + distances, freeinfo = _rectangular_distance_matrix(input_polydata_n, input_polydata_m, input_freesurfer, threshold, number_of_jobs, landmarks_n, landmarks_m, distance_method, bilateral=bilateral) if distance_method == 'StrictSimilarity': @@ -704,11 +716,11 @@ def _rectangular_similarity_matrix(input_polydata_n, input_polydata_m, threshold else: # similarity matrix sigmasq = sigma * sigma - similarity_matrix = similarity.distance_to_similarity(distances, sigmasq) + similarity_matrix = similarity.distance_to_similarity(distances, freeinfo, sigmasq) return similarity_matrix -def _pairwise_distance_matrix(input_polydata, threshold, +def _pairwise_distance_matrix(input_polydata, input_freesurfer, threshold, number_of_jobs=3, landmarks=None, distance_method='Hausdorff', bilateral=False, sigmasq=6400): @@ 
-728,33 +740,44 @@ def _pairwise_distance_matrix(input_polydata, threshold, else: fiber_array = fibers.FiberArray() - fiber_array.convert_from_polydata(input_polydata, points_per_fiber=15) + fiber_array.convert_from_polydata(input_polydata, input_freesurfer, points_per_fiber=15) # pairwise distance matrix - all_fibers = range(0, fiber_array.number_of_fibers) +# all_fibers = range(0, fiber_array.number_of_fibers) if landmarks is None: landmarks2 = numpy.zeros((fiber_array.number_of_fibers,3)) else: landmarks2 = landmarks - distances = Parallel(n_jobs=number_of_jobs, - verbose=0)( - delayed(similarity.fiber_distance)( +# distances = Parallel(n_jobs=number_of_jobs, +# verbose=0)( +# delayed(similarity.fiber_distance)( +# fiber_array.get_fiber(lidx), +# fiber_array, +# threshold, distance_method=distance_method, +# fiber_landmarks=landmarks2[lidx,:], +# landmarks=landmarks, bilateral=bilateral, sigmasq=sigmasq) +# for lidx in all_fibers) + distances = numpy.zeros([fiber_array.number_of_fibers,fiber_array.number_of_fibers]) + freeinfo = numpy.zeros([fiber_array.number_of_fibers,fiber_array.number_of_fibers]) + for lidx in xrange(0,fiber_array.number_of_fibers): + distances[lidx,:], freeinfo[lidx,:] = similarity.fiber_distance( fiber_array.get_fiber(lidx), fiber_array, threshold, distance_method=distance_method, fiber_landmarks=landmarks2[lidx,:], landmarks=landmarks, bilateral=bilateral, sigmasq=sigmasq) - for lidx in all_fibers) +# numpy.save('distances.npy',distances) +# numpy.save('freeinfo.npy',freeinfo) - distances = numpy.array(distances) +# distances = numpy.array(distances) # remove outliers if desired???? 
- return distances + return distances, freeinfo -def _pairwise_similarity_matrix(input_polydata, threshold, sigma, +def _pairwise_similarity_matrix(input_polydata, input_freesurfer, threshold, sigma, number_of_jobs=3, landmarks=None, distance_method='Hausdorff', bilateral=False): @@ -769,7 +792,7 @@ def _pairwise_similarity_matrix(input_polydata, threshold, sigma, """ - distances = _pairwise_distance_matrix(input_polydata, threshold, + distances, freeinfo = _pairwise_distance_matrix(input_polydata, input_freesurfer, threshold, number_of_jobs, landmarks, distance_method, bilateral=bilateral) if distance_method == 'StrictSimilarity': @@ -777,21 +800,21 @@ def _pairwise_similarity_matrix(input_polydata, threshold, sigma, else: # similarity matrix sigmasq = sigma * sigma - similarity_matrix = similarity.distance_to_similarity(distances, sigmasq) + similarity_matrix = similarity.distance_to_similarity(distances, freeinfo, sigmasq) # sanity check that on-diagonal elements are all 1 #print "This should be 1.0: ", numpy.min(numpy.diag(similarity_matrix)) #print numpy.min(numpy.diag(similarity_matrix)) == 1.0 # test - if __debug__: - # this tests that on-diagonal elements are all 1 - test = numpy.min(numpy.diag(similarity_matrix)) == 1.0 - if not test: - print " ERROR: On-diagonal elements are not all 1.0." - print" Minimum on-diagonal value:", numpy.min(numpy.diag(similarity_matrix)) - print" Maximum on-diagonal value:", numpy.max(numpy.diag(similarity_matrix)) - print" Mean value:", numpy.mean(numpy.diag(similarity_matrix)) - raise AssertionError +# if __debug__: +# # this tests that on-diagonal elements are all 1 +# test = numpy.min(numpy.diag(similarity_matrix)) == 1.0 +# if not test: +# print " ERROR: On-diagonal elements are not all 1.0." 
+# print" Minimum on-diagonal value:", numpy.min(numpy.diag(similarity_matrix)) +# print" Maximum on-diagonal value:", numpy.max(numpy.diag(similarity_matrix)) +# print" Mean value:", numpy.mean(numpy.diag(similarity_matrix)) +# raise AssertionError return similarity_matrix @@ -899,7 +922,7 @@ def _embed_to_rgb(embed): return color -def output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, number_of_subjects, outdir, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=False, verbose=False, render_images=True): +def output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_fiber_list, input_polydatas, input_freesurfer, number_of_subjects, outdir, cluster_numbers_s, color, embed, number_of_fibers_to_display, testing=False, verbose=False, render_images=True): """Save the output in our atlas format for automatic labeling of clusters. @@ -1022,7 +1045,7 @@ def output_and_quality_control_cluster_atlas(atlas, output_polydata_s, subject_f farray = fibers.FiberArray() farray.hemispheres = True farray.hemisphere_percent_threshold = 0.90 - farray.convert_from_polydata(pd_c, points_per_fiber=50) + farray.convert_from_polydata(pd_c, input_freesurfer, points_per_fiber=50) filter.add_point_data_array(pd_c, farray.fiber_hemisphere, "Hemisphere") # The clusters are stored starting with 1, not 0, for user friendliness. 
fname_c = 'cluster_{0:05d}.vtp'.format(c+1) diff --git a/whitematteranalysis/fibers.pyx b/whitematteranalysis/fibers.pyx old mode 100644 new mode 100755 index a05960e3..1b3c279b --- a/whitematteranalysis/fibers.pyx +++ b/whitematteranalysis/fibers.pyx @@ -10,7 +10,36 @@ class FiberArray import numpy import vtk import time - +from nibabel.affines import apply_affine + +def region_label_LR(label): + # combine left and right + left_sub_cortical_regions = [2, 7, 8, 10, 11, 12, 13, 17, 18, 26, 28] + right_sub_cortical_regions = [41, 46, 47, 49, 50, 51, 52, 53, 54, 58, 60] + + left_GM_cortical_regions = range(1001, 1036) + right_GM_cortical_regions = range(2001, 2036) + + left_WM_cortical_regions = range(3001, 3036) + right_WM_cortical_regions = range(4001, 4036) + + CC_regions = range(251, 256) + commissural_sub_cortical_regions = [16, 77, 85] + + WM_Unsegmented = [5001, 5002] + + for i in right_WM_cortical_regions: + label[label == i] = i - 2000 - 1000 + for i in left_WM_cortical_regions: + label[label == i] = i - 2000 + for i in right_GM_cortical_regions: + label[label == i] = i - 1000 + for i in WM_Unsegmented: + label[label == i] = 5001 + for i in right_sub_cortical_regions: + label[label == i] = left_sub_cortical_regions[right_sub_cortical_regions.index(i)] + + return label class Fiber: """A class for fiber tractography data, represented with a fixed length""" @@ -19,6 +48,7 @@ class Fiber: self.r = None self.a = None self.s = None + self.H = None self.points_per_fiber = None self.hemisphere_percent_threshold = 0.95 @@ -31,6 +61,7 @@ class Fiber: fiber.r = self.r[::-1] fiber.a = self.a[::-1] fiber.s = self.s[::-1] + fiber.H = self.H fiber.points_per_fiber = self.points_per_fiber @@ -45,6 +76,7 @@ class Fiber: fiber.r = - self.r fiber.a = self.a fiber.s = self.s + fiber.H = self.H fiber.points_per_fiber = self.points_per_fiber @@ -118,6 +150,8 @@ class FiberArray: self.fiber_array_r = None self.fiber_array_a = None self.fiber_array_s = None + + self.H = None # 
output arrays indicating hemisphere/callosal (L,C,R= -1, 0, 1) self.fiber_hemisphere = None @@ -190,6 +224,8 @@ class FiberArray: fiber.r = self.fiber_array_r[fiber_index, :] fiber.a = self.fiber_array_a[fiber_index, :] fiber.s = self.fiber_array_s[fiber_index, :] + + fiber.H = self.H[fiber_index, :] fiber.points_per_fiber = self.points_per_fiber @@ -316,7 +352,7 @@ class FiberArray: return fibers - def convert_from_polydata(self, input_vtk_polydata, points_per_fiber=None): + def convert_from_polydata(self, input_vtk_polydata, input_freesurfer, points_per_fiber=None): """Convert input vtkPolyData to the fixed length fiber representation of this class. @@ -339,6 +375,11 @@ class FiberArray: print " Converting polydata to array representation. Lines:", \ self.number_of_fibers + # load freesurfer infomation + labelArray = input_freesurfer.get_data() + labelArray = region_label_LR(labelArray) + c = numpy.unique(labelArray) + # allocate array number of lines by line length self.fiber_array_r = numpy.zeros((self.number_of_fibers, self.points_per_fiber)) @@ -346,6 +387,8 @@ class FiberArray: self.points_per_fiber)) self.fiber_array_s = numpy.zeros((self.number_of_fibers, self.points_per_fiber)) + self.H = numpy.zeros((self.number_of_fibers, + 27, c.shape[0])) # loop over lines input_vtk_polydata.GetLines().InitTraversal() @@ -376,6 +419,24 @@ class FiberArray: self.fiber_array_a[lidx, pidx] = point[1] self.fiber_array_s[lidx, pidx] = point[2] pidx = pidx + 1 + + #freesurfer infomation + U = numpy.zeros([27, line_length]) + for index in range(0, line_length): + ptidx = line_ptids.GetId(index) + point = inpoints.GetPoint(ptidx) + point_ijk = apply_affine(numpy.linalg.inv(input_freesurfer.affine), point) + ijk = numpy.rint(point_ijk).astype(numpy.int32) + U[:, index] = numpy.reshape(labelArray[(ijk[0] - 1):(ijk[0] + 2), (ijk[1] - 1):(ijk[1] + 2), (ijk[2] - 1):(ijk[2] + 2)], 27) + + for i in range(0,27): + V = numpy.unique(U[i,:]) + for j in range(0, V.shape[0]): + k = 
numpy.sum(U[i,:] == V[j])
+                t = numpy.argwhere(c == V[j])
+                self.H[lidx, i, t] = k / float(line_length)
+
+
         # initialize hemisphere info
         if self.hemispheres:
 
diff --git a/whitematteranalysis/similarity.pyx b/whitematteranalysis/similarity.pyx
old mode 100644
new mode 100755
index d6a2d934..7a0ab25f
--- a/whitematteranalysis/similarity.pyx
+++ b/whitematteranalysis/similarity.pyx
@@ -6,10 +6,12 @@
 import sys
 sys.setrecursionlimit(1000000)
 
-def distance_to_similarity(distance, sigmasq=100):
+def distance_to_similarity(distance, freeinfo, sigmasq=100):
 
     # compute the similarities using Gaussian kernel
-    similarities = numpy.exp(-distance / (sigmasq))
+#    similarities = numpy.exp(-distance / (sigmasq)) * freeinfo
+    similarities = 0.8 * numpy.exp(-distance / (sigmasq)) + 0.2 * numpy.exp(-1 / freeinfo)
+#    similarities = freeinfo
 
     return similarities
 
@@ -27,8 +29,8 @@ def fiber_distance(fiber, fiber_array, threshold=0, distance_method='MeanSquared
         fiber_equiv = fiber.get_equivalent_fiber()
 
     # compute pairwise fiber distances along fibers
-    distance_1 = _fiber_distance_internal_use(fiber, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq)
-    distance_2 = _fiber_distance_internal_use(fiber_equiv, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq)
+    distance_1, freeinfo_1 = _fiber_distance_internal_use(fiber, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq)
+    distance_2, freeinfo_2 = _fiber_distance_internal_use(fiber_equiv, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq)
 
     # choose the lowest distance, corresponding to the optimal fiber
     # representation (either forward or reverse order)
@@ -36,22 +38,26 @@ def fiber_distance(fiber, fiber_array, threshold=0, distance_method='MeanSquared
         # for use in laterality
         # this is the product of all similarity values along the fiber
         distance = numpy.maximum(distance_1, distance_2)
+        freeinfo = numpy.minimum(freeinfo_1, freeinfo_2)
else:
         distance = numpy.minimum(distance_1, distance_2)
+        freeinfo = numpy.maximum(freeinfo_1, freeinfo_2)
 
     if bilateral:
         fiber_reflect = fiber.get_reflected_fiber()
         # call this function again with the reflected fiber. Do NOT reflect again (bilateral=False) to avoid infinite loop.
-        distance_reflect = fiber_distance(fiber_reflect, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq, bilateral=False)
+        distance_reflect, freeinfo_reflect = fiber_distance(fiber_reflect, fiber_array, threshold, distance_method, fiber_landmarks, landmarks, sigmasq, bilateral=False)
         # choose the best distance, corresponding to the optimal fiber
         # representation (either reflected or not)
         if distance_method == 'StrictSimilarity':
             # this is the product of all similarity values along the fiber
             distance = numpy.maximum(distance, distance_reflect)
+            freeinfo = numpy.minimum(freeinfo, freeinfo_reflect)
         else:
             distance = numpy.minimum(distance, distance_reflect)
+            freeinfo = numpy.maximum(freeinfo, freeinfo_reflect)
 
-    return distance
+    return distance, freeinfo
 
 def fiber_distance_oriented(fiber, fiber_array, threshold=0, distance_method='MeanSquared', fiber_landmarks=None, landmarks=None, sigmasq=6400, bilateral=False):
     """Find pairwise fiber distance from fiber to all fibers in fiber_array, without testing both fiber orders.
@@ -102,6 +108,10 @@ def _fiber_distance_internal_use(fiber, fiber_array, threshold=0, distance_metho dx = numpy.square(ddx) dy = numpy.square(ddy) dz = numpy.square(ddz) + + freeinfo = fiber_array.H * fiber.H + freeinfo = numpy.sum(numpy.sum(freeinfo != 0, 2) * numpy.sum(freeinfo, 2), 1) + # sum dx dx dz at each point on the fiber and sqrt for threshold #distance = numpy.sqrt(dx + dy + dz) @@ -184,7 +194,7 @@ def _fiber_distance_internal_use(fiber, fiber_array, threshold=0, distance_metho raise Exception("unknown distance method") - return distance + return distance, freeinfo def rectangular_frechet_distances(input_vtk_polydata_m,input_vtk_polydata_n): # Computes distance matrix (nxm) for all n+m fibers in input