-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathworking_functions.py
More file actions
334 lines (263 loc) · 12.2 KB
/
working_functions.py
File metadata and controls
334 lines (263 loc) · 12.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
#Python file for working functions.
#Necessary modules
from scipy import spatial
from scipy import ndimage
from scipy import stats
from math import *
import numpy as np
import copy
import os
import cv2
import operator
import numpy.ma as ma
import time
import pickle as pkl
##Classes
class fly_data(object):
    """fly_data gathers the position and orientation of a given fly all along the video.

    Attributes:
        length: number of frames in the video.
        positions: (n_frames, 2) per-frame position of the fly in video pixels.
        identity: the fly's index in the idtrackerai trajectories.
        directions: per-frame movement direction in degrees.
        orientations: per-frame body orientation in degrees.
    """
    def __init__(self, data_dict, identity):
        # Number of frames = number of rows in the orientation stack.
        # (BUG FIX: the original stored data_dict['orientations'][0], i.e. the
        # first frame's row of orientations, not a frame count.)
        self.length = len(data_dict['orientations'])
        self.positions = data_dict['trajectories'][:, identity]  # The position of the fly for each frame in pixels of the video.
        self.identity = identity
        self.directions = data_dict['directions'][:, identity]
        self.orientations = data_dict['orientations'][:, identity]
    def __str__(self):
        # BUG FIX: the original called print(self, ...) inside __str__, which
        # recursed infinitely and returned None; __str__ must return a string.
        return ('fly_data object\n'
                'Number of frames : {}\n'
                'Identity in the video : {}'.format(self.length, self.identity))
    def ori_correc(self, overwrite=False):
        """Correct raw orientations using the movement directions.

        Converts the 0-180 ellipse orientations to a 360-degree format, then
        tries flipping them by 180, -90 and +90 degrees, keeping whichever
        candidate correlates best with the movement directions.

        Args:
            overwrite: if True, store the corrected orientations on self and
                return None; otherwise return the corrected array.
        """
        correc = convert180to360(self.orientations)  # Put orientations in a 360 degrees format
        flipped = flip(correc)        # Flips orientations (180 degrees)
        minus = flip(correc, -90)
        plus = flip(correc, 90)
        # Correlation of each candidate with the measured directions;
        # NaNs (frames where the fly was lost) are masked out.
        directions = ma.masked_invalid(self.directions)
        correl = dict()
        correl['flipped'] = ma.corrcoef(ma.masked_invalid(flipped), directions)[1, 0]
        correl['minus'] = ma.corrcoef(ma.masked_invalid(minus), directions)[1, 0]
        correl['plus'] = ma.corrcoef(ma.masked_invalid(plus), directions)[1, 0]
        correl['correc'] = ma.corrcoef(ma.masked_invalid(correc), directions)[1, 0]
        # Pick the candidate with the highest correlation.
        opti = max(correl.items(), key=operator.itemgetter(1))[0]
        if opti == 'flipped':
            print("Orientations have been corrected and flipped 180 degrees.")
            print("Correlation is ", correl['flipped'])
            if overwrite == True:
                self.orientations = flipped
            else:
                return flipped
        elif opti == 'minus':
            print("orientations have been corrected and flipped -90 degrees")
            print("Correlation is ", correl['minus'])
            if overwrite == True:
                self.orientations = minus
            else:
                return minus
        elif opti == 'plus':
            print("orientations have been corrected and flipped +90 degrees")
            print("Correlation is ", correl['plus'])
            if overwrite == True:
                self.orientations = plus
            else:
                return plus
        elif opti == 'correc':
            print("orientations have been corrected only")
            print("Correlation is ", correl['correc'])
            if overwrite == True:
                self.orientations = correc
            else:
                return correc
class relative_fly(object):
    """Relative kinematics of one fly with respect to another.

    Given two fly_data objects, expresses the second fly's per-frame position,
    distance, bearing and radial speed in a frame centred on the first fly and
    referenced to its body orientation.
    """
    def __init__(self, focusfly, newfly):
        """Should be given two fly_data objects. Produces relative position and angle of the second fly to the first."""
        # Per-frame vector from the focus fly to the other fly.
        self.positions = newfly.positions - focusfly.positions
        delta_x = self.positions[:, 0]
        delta_y = self.positions[:, 1]
        # Absolute distance and absolute bearing of the other fly.
        self.dist, bearing = angleFromPoints(delta_x, delta_y)
        # Bearing relative to the focus fly's own orientation, with
        # negative values wrapped back into [0, 360).
        rel_angle = bearing - focusfly.orientations
        rel_angle[rel_angle < 0] += 360
        self.angle = rel_angle
        # Frame-to-frame change in distance; first frame padded with 0.
        self.speed = np.insert(np.diff(self.dist), 0, 0)
class relative_set(object):
    """Relative positions/distances/angles for every ordered pair of flies.

    Given a list of fly_data objects, computes the relative_fly of every fly
    with respect to every other and flattens the results into single stacks.

    Attributes:
        positions: (n_pairs * n_frames, 2) cartesian relative positions.
        dist: flat array of relative distances.
        orientations: flat array of relative angles in degrees.
    """
    def __init__(self, flystack):
        pos_parts = []
        dist_parts = []
        angle_parts = []
        for focus in flystack:
            for other in flystack:
                if focus is other:  # a fly relative to itself is meaningless
                    continue
                rel = relative_fly(focus, other)
                # Convert polar (dist, angle) back to cartesian coordinates
                # in the focus fly's reference frame.
                theta = np.radians(rel.angle)
                pos_parts.append(np.column_stack([rel.dist * np.cos(theta),
                                                  rel.dist * np.sin(theta)]))
                dist_parts.append(rel.dist)
                angle_parts.append(rel.angle)
        # BUG FIX: the original seeded dist_stack/angle_stack with
        # np.empty([]), leaving one uninitialized garbage value at the start
        # of each stack (only pos_stack stripped its placeholder row).
        self.positions = np.vstack(pos_parts) if pos_parts else np.empty((0, 2))
        self.dist = np.hstack(dist_parts) if dist_parts else np.empty(0)
        self.orientations = np.hstack(angle_parts) if angle_parts else np.empty(0)
#Functions
def get_ori(Dir, video, traj, thresh=50, noise_thresh=0.5):
    """Extract per-fly orientations, directions and speeds from a video.

    Matches each idtrackerai trajectory point to the nearest thresholded
    contour of the corresponding video frame and fits an ellipse to recover
    the fly's body orientation. Both the .npy trajectory file and the video
    file must live in the `Dir` folder.

    Args:
        Dir: folder containing both input files.
        video: video file name.
        traj: idtrackerai trajectories .npy file name.
        thresh: binary threshold (0-255) applied to the greyscale frames.
        noise_thresh: displacements smaller than this (in pixels) are
            treated as tracking noise when computing directions.

    Returns:
        A list with one fly_data object per tracked individual.
    """
    # Fantastic files and where to find them
    path_to_vid = os.path.join(Dir, video)
    path_to_traj = os.path.join(Dir, traj)
    cap = cv2.VideoCapture(path_to_vid)
    # allow_pickle=True is required on numpy >= 1.16.3 to load the pickled
    # dict produced by idtrackerai (np.load's default changed to False).
    trajectories_dict = np.load(path_to_traj, encoding='latin1', allow_pickle=True).item()
    trajectories = trajectories_dict['trajectories']
    # Some information that will be useful later
    nberflies = len(trajectories_dict['trajectories'][0])  # number of individuals
    framerate = cap.get(cv2.CAP_PROP_FPS)
    ori_stack = None  # replaces the original try/NameError bootstrap
    i = 0  # index so that we take the right trajectories for every frame
    while cap.isOpened():
        # Take the next image from the video and turn it grey
        ret, img = cap.read()
        if img is None:  # no more frames: stop
            cap.release()
            break
        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # Set the threshold and get the binary image
        ret, diff_img = cv2.threshold(img, thresh, 255, cv2.THRESH_BINARY)
        # Get a bunch of contours from the binary image
        contours, hierarchy = cv2.findContours(diff_img, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
        # Keep reasonably sized contours, remembering their centers
        # (centers[k] always corresponds to right_contours[k]).
        centers = list()
        right_contours = list()
        for c in contours:
            if 15 < len(c) < 100:  # contour should be in a reasonable size range
                M = cv2.moments(c)
                if M["m00"] == 0:
                    # BUG FIX: a degenerate (zero-area) contour made the
                    # original divide by zero; skip it instead.
                    continue
                centers.append([int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"])])
                right_contours.append(c)
        # Match each tracked fly to its nearest contour and fit an ellipse.
        positions = trajectories[i]
        ori_frame = list()  # orientations for the current frame
        for fly in positions:
            if np.isfinite(fly[0]):
                ind = spatial.KDTree(centers).query(fly)[1]
                ellipse = cv2.fitEllipse(right_contours[ind])  # Fitting the ellipse
                # BUG FIX: np.float was removed from numpy (>= 1.24);
                # the builtin float is equivalent.
                ori_frame.append(float(ellipse[2]))
            else:
                ori_frame.append(np.nan)
        ori_frame = np.array(ori_frame)
        ori_stack = ori_frame if ori_stack is None else np.vstack([ori_stack, ori_frame])
        # Get directions part:
        shape_pos = np.arange(nberflies)
        if i == 0:  # If first iteration, everything is zero
            direc_stack = np.zeros_like(shape_pos)
            speed_stack = np.zeros_like(shape_pos)
            dist_stack = np.zeros_like(shape_pos)
            direc = np.zeros_like(shape_pos)
        else:  # (was `elif isinstance(i, int)`, which is always true)
            dist, direc = get_angle(trajectories, i, direc, noise_thresh)
            speed = dist * framerate
            dist_stack = np.vstack([dist_stack, dist])      # distance traveled in pixels
            speed_stack = np.vstack([speed_stack, speed])   # speed in pixels per second
            direc_stack = np.vstack([direc_stack, direc])   # direction in degrees
        i += 1
    data = dict()  # Dictionary in which we will put the stacks of data.
    data['trajectories'] = trajectories
    data['speeds'] = speed_stack
    data['directions'] = direc_stack
    data['orientations'] = np.array(ori_stack)
    data['mov'] = dist_stack
    data['contours'] = right_contours  # NOTE(review): contours of the LAST frame only
    return [fly_data(data, fly) for fly in range(nberflies)]
def get_angle(trajectories, frameID, previous_angles, noise_thresh=0.5):
    """Compute per-fly travel distance and movement direction for one frame.

    The direction is the angle (degrees) of the displacement between frame
    frameID-1 and frame frameID. Displacements inside the +/-noise_thresh
    box are treated as tracking noise: the distance is zeroed and the
    previous direction is kept.
    """
    # Displacement of every fly since the previous frame.
    displacement = trajectories[frameID] - trajectories[frameID - 1]
    dx = displacement[:, 0]
    dy = displacement[:, 1]
    # Raw distance and angle for each displacement.
    dist, angle = angleFromPoints(dx, dy)
    for idx, (x, y) in enumerate(zip(dx, dy)):
        # Remove noise: sub-threshold jitter counts as "not moving".
        if -noise_thresh < x < noise_thresh and -noise_thresh < y < noise_thresh:
            dist[idx] = 0
            angle[idx] = previous_angles[idx]
    return (dist, angle)
def convert180to360(ori):
    """Unwrap orientations reported modulo 180 into a 0-360 representation.

    Ellipse fits only give orientations up to a 180-degree ambiguity.
    Whenever two consecutive orientations jump by more than 90 degrees, the
    remainder of the series is shifted by 180, alternating between adding
    and subtracting so successive wrap-arounds cancel out.

    Args:
        ori: 1-D numpy array of orientations in degrees.

    Returns:
        A new array (the input is not modified) in the 360-degree format.
    """
    ori_conv = copy.deepcopy(ori)
    # Differences are computed once, before any shifting.
    diffs = ori_conv[1:] - ori_conv[0:-1]
    add_next = True  # renamed from `bool`, which shadowed the builtin
    for i in range(len(diffs)):
        if abs(diffs[i]) > 90:
            if add_next:
                ori_conv[i+1:] += 180
            else:
                ori_conv[i+1:] -= 180
            add_next = not add_next
    return ori_conv
def flip(ori, angle=180):
    """Return a copy of `ori` shifted by `angle` degrees.

    Whenever a shifted value leaves the (0, 360) interval, the tail of the
    series (from that element onward) is re-wrapped by +/-360 so the whole
    series stays consistent with the convert180to360 representation.

    Args:
        ori: array of angles in degrees.
        angle: shift to apply, in degrees (default 180).
    """
    shifted = copy.deepcopy(ori)
    shifted += angle
    for idx in range(len(shifted)):
        value = shifted[idx]
        if value >= 360:
            shifted[idx:] -= 360
        elif value <= 0:
            shifted[idx:] += 360
    return shifted
def direc_stats(data, chunkrange, fly):
    """Print direction/orientation correlation statistics for one fly over a
    chunk of frames, then plot both series.

    NOTE(review): `plt` is never imported in this file, so the plotting lines
    raise NameError unless matplotlib.pyplot is imported as plt elsewhere —
    confirm before relying on this function.
    """
    # Not really useful atm
    chunk_direc = data['directions'][chunkrange, fly]
    chunk_ori = data['orientations'][chunkrange, fly]
    print("Stats for fly {} from frame {} to {}".format(fly, chunkrange[0], chunkrange[-1]))
    print("Variance of the direction over the chunk is :", np.var(chunk_direc))
    print("Cross correlations : ", np.correlate(chunk_direc, chunk_ori))
    print("Correlation coef : ", np.corrcoef(np.vstack([chunk_direc, chunk_ori]))[1, 0], '\n')
    plt.figure(figsize=(20, 5))
    plt.ylim([-10, 370])
    plt.plot(chunk_ori)
    plt.plot(chunk_direc)
def angleFromPoints(x, y):
    """Convert x/y displacement arrays to (distance, angle in degrees).

    Angles follow the trigonometric circle: 0 points right, growing
    counter-clockwise, in [0, 360). Works element-wise on arrays and also
    on scalars. NaN displacements yield NaN angles.

    Replaces the original arctan(|y|/|x|) + quadrant-correction loop with a
    single arctan2 call: identical results in every quadrant and for x == 0,
    but without the divide-by-zero RuntimeWarnings (and the (0, 0) case now
    gives angle 0 instead of NaN).
    """
    dist = np.hypot(x, y)  # same as sqrt(x*x + y*y)
    # arctan2 returns (-180, 180]; modulo wraps it onto [0, 360).
    angle = np.degrees(np.arctan2(y, x)) % 360
    return dist, angle
#Handling the data in a "packaged" way
def save_ori_pickle(session_name, Dir="/home/maubry/python/idtrackerai/raw"):
    """Run get_ori for a session and pickle the resulting fly stack.

    Expects <session_name>.avi and <session_name>.npy in `Dir`; writes
    <session_name>_ori_data.pkl in the current working directory and prints
    the elapsed time.

    Args:
        session_name: base name shared by the video and trajectory files.
        Dir: folder holding the raw video/trajectory files (generalized from
            the previously hard-coded path; the default keeps old behavior).
    """
    st = time.time()
    video = session_name + ".avi"
    traj = session_name + ".npy"
    pkl_name = session_name + "_ori_data.pkl"
    flystack = get_ori(Dir, video, traj)
    # `with` guarantees the pickle file is closed even if dump() fails
    # (the original left the handle open).
    with open(pkl_name, "wb") as f:
        pkl.dump(flystack, f)
    laps = time.time() - st
    print('time elapsed : {}min and {}sec'.format(laps//60, int(laps%60)))
def get_ori_pickle(session_name):
    """Load and return the pickled fly stack saved by save_ori_pickle.

    Args:
        session_name: base name used when saving; reads
            <session_name>_ori_data.pkl from the current working directory.

    Returns:
        Whatever object was pickled (normally a list of fly_data objects).
    """
    pkl_name = session_name + "_ori_data.pkl"
    # `with` closes the file handle (the original leaked it).
    with open(pkl_name, "rb") as f:
        return pkl.load(f)