 from functools import reduce
 from frame_blobs import frame_blobs_root, intra_blob_root, imread
 from comp_slice import comp_latuple, comp_md_
-from vect_edge import feedback, comp_node_, comp_link_, sum2graph, get_rim, CH, CG, ave, ave_L, vectorize_root, comp_area, extend_box, val_
+from vect_edge import feedback, comp_node_, comp_link_, sum_G_, sum2graph, get_rim, CH, CG, ave, ave_L, vectorize_root, comp_area, extend_box, val_
 '''
 Cross-compare and cluster Gs within a frame, potentially unpacking their node_s first,
 alternating agglomeration and centroid clustering.
 capitalized vars are summed small-case vars
 '''
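
(For orientation, a minimal self-contained sketch of the cross-comp -> connectivity-clustering flow described above, on toy scalar nodes rather than this module's CG/CH graphs; the min-of-magnitudes match measure, the average-based threshold, and all names below are illustrative assumptions, not part of the patch.)

# Illustrative only: cross-compare toy nodes, keep above-average-match links,
# then form connectivity clusters (connected components) from those links.
import itertools

nodes = [10.0, 11.0, 10.5, 3.0, 2.5, 20.0]        # toy 1-D node "signatures"
links = []                                        # (i, j, match, difference) per compared pair
for i, j in itertools.combinations(range(len(nodes)), 2):
    m = min(abs(nodes[i]), abs(nodes[j]))         # match: shared magnitude
    d = nodes[i] - nodes[j]                       # difference: new derivative
    links.append((i, j, m, d))

ave_m = sum(l[2] for l in links) / len(links)     # average match, a stand-in for ave
strong = [l for l in links if l[2] > ave_m]       # keep above-average links only

clusters = []                                     # connectivity clustering: merge linked nodes
for i, j, m, d in strong:
    hit = [c for c in clusters if i in c or j in c]
    merged = {i, j}.union(*hit)
    clusters = [c for c in clusters if c not in hit] + [merged]
print(clusters)   # -> [{0, 1, 2, 5}]; nodes 3 and 4 have only below-average matches, so they stay unclustered
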
 
-def cross_comp(root):  # breadth-first node_,link_ cross-comp, connect.clustering, recursion
+def cross_comp(root, falt=0):  # breadth-first node_,link_ cross-comp, connect.clustering, recursion
 
-    N_,L_,Et = comp_node_(root.subG_)  # cross-comp exemplars, extrapolate to their node_s
+    N_,L_,Et = comp_node_(root.altG_ if falt else root.node_)  # cross-comp exemplars, extrapolate to their node_s
     # mfork
     if val_(Et, fo=1) > 0:
         mlay = CH().add_tree([L.derH for L in L_]); H = root.derH; mlay.root = H; H.Et += mlay.Et; H.lft = [mlay]
         pL_ = {l for n in N_ for l,_ in get_rim(n, fd=0)}
         if len(pL_) > ave_L:
-            cluster_N_(root, pL_, fd=0)  # nested distance clustering, calls centroid and higher connect.clustering
+            G_ = cluster_N_(root, pL_, fd=0, falt=falt)  # nested distance clustering, calls centroid and higher connect.clustering
         # dfork
-        if val_(Et, mEt=Et, fo=1) > 0:  # same root for L_, root.link_ was compared in root-forming for alt clustering
+        if val_(Et, mEt=Et, fo=1) > 0:  # same root for L_, root.link_ was compared in root-forming for alt clustering
             for L in L_:
                 L.extH, L.root, L.Et, L.mL_t, L.rimt, L.aRad, L.visited_ = CH(), root, copy(L.derH.Et), [[],[]], [[],[]], 0, [L]
             lN_,lL_,dEt = comp_link_(L_, Et)
             if val_(dEt, mEt=Et, fo=1) > 0:
                 dlay = CH().add_tree([L.derH for L in lL_]); dlay.root = H; H.Et += dlay.Et; H.lft += [dlay]
                 plL_ = {l for n in lN_ for l,_ in get_rim(n, fd=1)}
                 if len(plL_) > ave_L:
-                    cluster_N_(root, plL_, fd=1)
-
+                    cluster_N_(root, plL_, fd=1, falt=falt)
+        # draft (I think this should go here, right after the d fork, else G doesn't have altG_ yet;
+        # note that G_ is only bound if the mfork clustering above ran):
+        for G in G_:
+            if G.altG_:  # non-empty
+                # cross-comp | sum altG_ -> combined altG before next agg+ cross-comp
+                if val_(np.sum([alt.Et for alt in G.altG_], axis=0), mEt=G.Et):
+                    cross_comp(G, falt=1)  # altG_ will be updated within cluster_N_
+                else:
+                    # why do we need to sum them into a single altG here?
+                    G.altG_ = sum_G_([alt for alt in G.altG_])
         feedback(root)  # add root derH to higher roots derH
 
-def cluster_N_(root, L_, fd, nest=0):  # top-down segment L_ by >ave ratio of L.dists
+def cluster_N_(root, L_, fd, nest=0, falt=0):  # top-down segment L_ by >ave ratio of L.dists
 
     L_ = sorted(L_, key=lambda x: x.dist)  # shorter links first
-    for n in [n for l in L_ for n in l.nodet]: n.fin = 0
     _L = L_[0]
     N_, et = copy(_L.nodet), _L.derH.Et
     L_ = L_[1:]
+    G_ = []  # should this be outside the while loop?
     while L_:  # longer links
+        for n in [n for l in L_ for n in l.nodet]: n.fin = 0  # should this be here? We may get the same N in a later segment
         for i, L in enumerate(L_):  # short first
             rel_dist = L.dist / _L.dist  # >= 1
+            # there's a problem here
             if rel_dist < 1.2 or val_(et) > 0 or len(L_[i:]) < ave_L:  # ~=dist Ns or either side of L is weak
                 _L = L; N_ += L.nodet; et += L.derH.Et  # last L
             else:
                 i -= 1; break  # terminate contiguous-distance segment
-        G_ = []
+
         max_dist = _L.dist
         for N in {*N_}:  # cluster current distance segment
             _eN_, node_, link_, et = [N], [], [], np.zeros(4)
             while _eN_:
                 eN_ = []
                 for eN in _eN_:  # cluster rim-connected ext Ns, all in root Gt
+                    if eN.fin: continue  # was this missed out?
                     node_ += [eN]; eN.fin = 1  # all rim
                     for L,_ in get_rim(eN, fd):
                         if L not in link_:  # if L.derH.Et[0]/ave * n.extH m/ave or L.derH.Et[0] + n.extH m*.1: density?
@@ -69,11 +80,19 @@ def cluster_N_(root, L_, fd, nest=0):  # top-down segment L_ by >ave ratio of L.dists
             G_ += [sum2graph(root, [list({*node_}), list({*link_}), et], fd, max_dist, nest)]
             # cluster node roots if nest else nodes
         nest += 1
-        if fd: root.link_ = G_  # replace with current-dist clusters
-        else:  root.node_ = G_
         L_ = L_[i+1:]  # get longer links if any, may connect current-dist clusters
         N_ = []
-    cluster_C_(root)
+
+    # the block below should be outside the while loop? G_ is only complete once the loops end
+    if falt:
+        if not fd: root.altG_ = G_  # for the d fork, no further update since we don't have alt_link_?
+    else:
+        if fd: root.link_ = G_  # replace with current-dist clusters
+        else:  root.node_ = G_
+    cluster_C_(root, falt=falt)
+
+    return G_
+
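
(A toy sketch of the "segment L_ by >ave ratio of L.dists" step above, assuming only a sorted list of link distances and the 1.2 ratio cutoff used in the code, without the val_/ave_L gating; names are hypothetical.)

# Illustrative only: split sorted link distances into contiguous segments whenever
# a link is ~1.2x longer than the last link accepted into the current segment.
def segment_by_rel_dist(dists, max_ratio=1.2):
    dists = sorted(dists)                    # shorter links first
    segments = [[dists[0]]]
    for d in dists[1:]:
        if d / segments[-1][-1] < max_ratio:
            segments[-1].append(d)           # ~same distance: extend current segment
        else:
            segments.append([d])             # distance jump: start a longer-range segment
    return segments

print(segment_by_rel_dist([1.0, 1.1, 1.15, 2.0, 2.1, 5.0]))   # -> [[1.0, 1.1, 1.15], [2.0, 2.1], [5.0]]
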
 
 ''' Hierarchical clustering should alternate between two phases: generative via connectivity and compressive via centroid.
@@ -86,15 +105,15 @@ def cluster_N_(root, L_, fd, nest=0):  # top-down segment L_ by >ave ratio of L.dists
 So connectivity clustering is a generative learning phase, forming new derivatives and structured composition levels,
 while centroid clustering is a compressive phase, reducing multiple similar comparands to a single exemplar. '''
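
(A tiny sketch of that compressive phase on toy 1-D node values, loosely analogous to the centroid refinement in cluster_C_ below; the 20% similarity band and the helper name are arbitrary assumptions, not this module's val_/ave criteria.)

# Illustrative only: reduce a cluster to one centroid exemplar, then re-check membership
# against that exemplar and re-average until the centroid stops moving (compression).
def refine_centroid(cluster, pool, tolerance=1e-6):
    C = sum(cluster) / len(cluster)                               # initial exemplar: cluster average
    while True:
        members = [n for n in pool if abs(n - C) < abs(n) * 0.2]  # nodes within a 20% band of the exemplar
        new_C = sum(members) / len(members) if members else C
        if abs(new_C - C) < tolerance:
            return new_C, members                                 # one exemplar now stands for all members
        C = new_C

pool = [10.0, 11.0, 10.5, 3.0, 2.5, 20.0]
print(refine_centroid([10.0, 11.0, 10.5], pool))   # -> (10.5, [10.0, 11.0, 10.5])
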
 
-def cluster_C_(graph):
+def cluster_C_(graph, falt=0):
 
     def centroid(dnode_, node_, C=None):  # sum|subtract and average Rim nodes
 
         if C is None:
             C = CG(); C.L = 0; C.M = 0  # setattr summed len node_ and match to nodes
         for n in dnode_:
             s = n.sign; n.sign = 1  # single-use sign
-            C.Et += n.Et * s; C.rng = n.rng * s; C.aRad += n.aRad * s
+            C.Et += n.Et * s; C.aRad += n.aRad * s
             C.L += len(n.node_) * s
             C.latuple += n.latuple * s
             C.vert += n.vert * s
@@ -155,7 +174,7 @@ def centroid_cluster(N):  # refine and extend cluster with extN_
         return N  # keep seed node
 
     # get representative centroids of complemented Gs: mCore + dContour, initially in unpacked edges
-    N_ = sorted([N for N in graph.node_ if any(N.Et)], key=lambda n: n.Et[0], reverse=True)
+    N_ = sorted([N for N in (graph.altG_ if falt else graph.node_) if any(N.Et)], key=lambda n: n.Et[0], reverse=True)
     G_ = []
     for N in N_:
         N.sign, N.m, N.fin = 1, 0, 0  # setattr: C update sign, inclusion val, prior C inclusion flag
@@ -166,33 +185,20 @@ def centroid_cluster(N):  # refine and extend cluster with extN_
         else:  # the rest of N_ M is lower
             G_ += [N for N in N_[i:] if not N.fin]
             break
-    # draft:
-    for G in G_:
-        # cross-comp | sum altG_ -> combined altG before next agg+ cross-comp
-        if val_(np.sum([alt.Et for alt in G.altG_]), mEt=G.Et):
-            G.altG_ = cross_comp(G, G.altG_)
-        else:
-            G.altG_ = reduce(sum_G_, [alt for alt in G.altG_])
-
-    graph.node_ = G_  # mix of Ns and Cs: exemplars of their network?
+
+    if falt: graph.altG_ = G_
+    else:    graph.node_ = G_  # mix of Ns and Cs: exemplars of their network?
     if len(G_) > ave_L:
-        cross_comp(graph)  # selective connectivity clustering between exemplars, extrapolated to their node_
+        cross_comp(graph, falt)  # selective connectivity clustering between exemplars, extrapolated to their node_
 
-def sum_G_(node_):
-    G = CG()
-    for n in node_:
-        G.rng = n.rng; G.latuple += n.latuple; G.vert += n.vert; G.aRad += n.aRad; G.box = extend_box(G.box, n.box)
-        if n.derH: G.derH.add_tree(n.derH, root=G)
-        if n.extH: G.extH.add_tree(n.extH)
-    return G
 
 if __name__ == "__main__":
     image_file = './images/raccoon_eye.jpeg'
     image = imread(image_file)
     frame = frame_blobs_root(image)
     intra_blob_root(frame)
     vectorize_root(frame)
-    if frame.subG_:  # converted edges
+    if frame.node_:  # converted edges
         G_ = []
         for edge in frame.node_:
             cluster_C_(edge)  # no cluster_C_ in vect_edge