-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain_script.sh
345 lines (278 loc) · 14.4 KB
/
main_script.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
#!/bin/bash
#set -eo pipefail
# Script to process T1 images in a BIDS-formatted dataset with FreeSurfer's recon-all
# Author: Carolina Ferreira-Atuesta, Sept 2023
# Contact: cfatuesta@gmail.com

# --- Intro banner: what the script does and the BIDS layout it expects ---
echo -e "FreeSurfer's recon-all BIDS Processing Script"
echo ""
echo "This script will:"
echo "- Run FreeSurfer's recon-all on all T1 images found in a BIDS dataset."
echo "- Create a quality control montage of the FreeSurfer output for each subject and session."
echo "- Extract the metrics into a CSV file."
echo "- Extract T1 metadata into a CSV file."
echo "- Run the longitudinal stream if there's more than one session per subject."
echo ""
echo "----------------------------------------------------------------------------------------"
echo "Note:"
echo -e "Make sure your dataset is organized according to the BIDS specification. This entails:\n"
echo "1. A main directory."
echo "2. Subdirectories for each subject labeled as 'sub-XXX'."
echo "3. Within each subject's directory, subdirectories for each session labeled as 'ses-XX'."
echo "4. Each session directory should contain an 'anat' subdirectory with the T1 image in nifti format named as 'sub-XXXXX_ses-XX_T1w.nii.gz'."
echo ""
echo "Also make sure you have the following scripts in your SUBJECTS_DIR path:"
echo "process_csv.py, merge_csv.py and process_longitudinal.py"
echo "Do not change the names of these scripts."
echo ""
echo "----------------------------------------------------------------------------------------"
echo "----------------------------------------------------------------------------------------"
echo -e "If your dataset isn't organized and named correctly, press Ctrl+C to exit."
echo "Otherwise, press enter to continue."
# Fix: the prompt above promises a pause, but the script previously fell
# straight through without waiting. Wait for Enter (or Ctrl+C) here.
read -r
# Dependency Checks
# Abort up front if the external tools this script shells out to are missing.
command -v freeview > /dev/null 2>&1 || {
    echo "Error: FreeSurfer's freeview command is not found."
    exit 1
}
command -v magick > /dev/null 2>&1 || {
    echo "Error: ImageMagick's magick command is not found. To install, type: brew install imagemagick"
    exit 1
}
# Prompt user for paths; a value already present in the environment is
# offered as the default (pressing Enter keeps it).
# Fix: read -r so backslashes typed in a path are taken literally.
if [ -z "$SUBJECTS_DIR" ]; then
    read -r -p "Enter the path to the SUBJECTS dataset: " SUBJECTS_DIR
else
    read -r -p "Enter the path to the SUBJECTS dataset [current path: $SUBJECTS_DIR - if this is OK, just click enter]: " NEW_SUBJECTS_DIR
    SUBJECTS_DIR=${NEW_SUBJECTS_DIR:-$SUBJECTS_DIR}
fi
if [ -z "$FREESURFER_HOME" ]; then
    read -r -p "Enter the path to Freesurfer installation: " FREESURFER_HOME
else
    read -r -p "Enter the path to Freesurfer installation [current path: $FREESURFER_HOME - if this is OK, just click enter]: " NEW_FREESURFER_HOME
    FREESURFER_HOME=${NEW_FREESURFER_HOME:-$FREESURFER_HOME}
fi
# Both paths must exist and be directories before we source anything.
for path in "$SUBJECTS_DIR" "$FREESURFER_HOME"; do
    if [ ! -d "$path" ]; then
        echo "Error: $path does not exist or is not a directory."
        exit 1
    fi
done
# Set FREESURFER_HOME and source the FreeSurfer environment setup script.
export FREESURFER_HOME
source "$FREESURFER_HOME/SetUpFreeSurfer.sh"
# List all subjects in the SUBJECTS dataset.
# Fix: iterate the glob directly instead of parsing `ls` output (fragile;
# SC2010). Only directories count as subjects. SUBJECTS stays a
# space-separated string because later loops word-split it.
SUBJECTS=""
for subj_dir in "$SUBJECTS_DIR"/sub-*; do
    [ -d "$subj_dir" ] && SUBJECTS="$SUBJECTS ${subj_dir##*/}"
done
# Display all T1s found
t1s_found=0
t1s_array=()   # fix: start from a clean array; don't inherit a stale value
echo "Found the following T1 images:"
for subj in $SUBJECTS; do
    for session in ses-01 ses-02 ses-03; do
        # Prefer the uncompressed NIfTI; fall back to the .nii.gz variant.
        T1="${SUBJECTS_DIR}/${subj}/${session}/anat/${subj}_${session}_T1w.nii"
        if [ ! -e "$T1" ]; then
            T1="${SUBJECTS_DIR}/${subj}/${session}/anat/${subj}_${session}_T1w.nii.gz"
        fi
        if [ -f "$T1" ]; then
            echo "$T1"
            (( t1s_found++ ))
            t1s_array+=("$T1")
        fi
    done
done
# Final message with the count
echo "Total T1s found: $t1s_found"
# Extracting metadata from the T1s (helper script expected in SUBJECTS_DIR;
# see the header notes at the top of the file).
python3 extract_metadata.py "${t1s_array[@]}"
echo "Checking which T1s have not been processed through freesurfer..."
# Classify every discovered T1 into one of two work queues:
#   t1s_not_processed  - recon-all never ran or did not finish cleanly
#   t1s_without_montage - recon-all finished but QA montages are missing
t1s_not_processed=()
t1s_without_montage=()
for subj in $SUBJECTS; do
    for session in ses-01 ses-02 ses-03; do
        anat_prefix="${SUBJECTS_DIR}/${subj}/${session}/anat/${subj}_${session}_T1w"
        T1="${anat_prefix}.nii"
        [ -e "$T1" ] || T1="${anat_prefix}.nii.gz"
        [ -f "$T1" ] || continue
        deriv="${SUBJECTS_DIR}/${subj}/${session}/derivatives"
        log_path="${deriv}/scripts/recon-all.log"
        montage_3d="${deriv}/qa-output/montage-3d.png"
        montage_2d="${deriv}/qa-output/montage-2d.png"
        # A successful recon-all run leaves "finished without error" in its log.
        if [ -f "$log_path" ] && grep -q "finished without error" "$log_path"; then
            if [ ! -f "$montage_3d" ] || [ ! -f "$montage_2d" ]; then
                t1s_without_montage+=("$T1")
            fi
        else
            t1s_not_processed+=("$T1")
        fi
    done
done
echo "Found ${#t1s_not_processed[@]} T1 images that have not been processed through freesurfer."
echo "Found ${#t1s_without_montage[@]} T1 images without montages."
create_2d_slices() {
    # Render 2-D coronal and axial screenshots with freeview and tile them
    # into a single QA montage ($output_dir/montage-2d.png).
    # Arguments: $1 subject label, $2 subject path, $3 session label,
    #            $4 output directory for frames/montage/command file.
    local subj=$1
    local sub_path=$2
    local session=$3
    local output_dir=$4
    local cmd_file="$output_dir/freeview-commands-2d"
    local n   # fix: keep the loop counter local instead of leaking a global
    # Debug trace of the arguments (fix: quoted so paths with spaces print intact)
    echo "$subj" "$sub_path" "$session" "$output_dir" "$cmd_file"
    # Build the freeview command file: 20 coronal slices (z = 10..200),
    # then 20 axial slices (y = 40..230), one screenshot per slice.
    echo "-viewport coronal" > "$cmd_file"
    echo "-zoom 1.2" >> "$cmd_file"
    for n in $(seq 10 10 200); do
        echo "-slice 130 150 $n" >> "$cmd_file"
        echo "-ss $output_dir/frame-2d-coronal-$(printf '%03d' "$n").png" >> "$cmd_file"
    done
    echo "-viewport axial" >> "$cmd_file"
    for n in $(seq 40 10 230); do
        echo "-slice 130 $n 132" >> "$cmd_file"
        echo "-ss $output_dir/frame-2d-axial-$(printf '%03d' "$n").png" >> "$cmd_file"
    done
    echo "-quit" >> "$cmd_file"
    echo "Running freeview"
    # Load volumes plus white/pial surfaces, then replay the command file.
    freeview -v \
        "$sub_path/${session}/derivatives/${subj}/mri/T1.mgz" \
        "$sub_path/${session}/derivatives/${subj}/mri/aparc+aseg.mgz" \
        "$sub_path/${session}/derivatives/${subj}/mri/brainmask.mgz" \
        "$sub_path/${session}/derivatives/${subj}/mri/aseg.mgz:colormap=lut:opacity=0.2" \
        -f "$sub_path/${session}/derivatives/${subj}/surf/lh.white:edgecolor=blue" \
        "$sub_path/${session}/derivatives/${subj}/surf/lh.pial:edgecolor=red" \
        "$sub_path/${session}/derivatives/${subj}/surf/rh.white:edgecolor=blue" \
        "$sub_path/${session}/derivatives/${subj}/surf/rh.pial:edgecolor=red" \
        -cmd "$cmd_file"
    echo "Creating montage"
    magick montage "$output_dir/"frame-2d-*.png -tile 5x8 -geometry +0+0 "$output_dir/montage-2d.png"
    # Keep only the montage; remove the individual frames.
    rm "$output_dir/"frame-2d-*.png
}
create_3d_slices() {
    # Capture six 3-D pial-surface views (left, caudal, right, rostral,
    # dorsal, ventral) with freeview and tile them into
    # $output_dir/montage-3d.png.
    # Arguments: $1 subject label, $2 subject path, $3 session label,
    #            $4 output directory.
    local subj=$1
    local sub_path=$2
    local session=$3
    local output_dir=$4
    local cmd_file="$output_dir/freeview-commands-3d"
    # Write the whole freeview command script via one grouped redirection:
    # rotate the camera between lateral shots, then take top/bottom views.
    {
        echo "-viewport 3d"
        echo "-hide-3d-frames"
        echo "-zoom 1.5"
        echo "-ss $output_dir/parc-3d-01-left.png"
        echo "-cam azimuth 90 "
        echo "-ss $output_dir/parc-3d-02-caudal.png"
        echo "-cam azimuth 90 "
        echo "-ss $output_dir/parc-3d-03-right.png"
        echo "-cam azimuth 90"
        echo "-ss $output_dir/parc-3d-04-rostral.png"
        echo "-cam azimuth 180 elevation 90"
        echo "-ss $output_dir/parc-3d-05-dorsal.png"
        echo "-cam elevation 180"
        echo "-ss $output_dir/parc-3d-06-ventral.png"
        echo "-quit"
    } > "$cmd_file"
    echo "Running freeview"
    # Load the brainmask plus both annotated pial surfaces, replay the script.
    freeview -v \
        "$sub_path/${session}/derivatives/${subj}/mri/brainmask.mgz" \
        -f "$sub_path/${session}/derivatives/${subj}/surf/lh.pial:annot=aparc.annot:name=pial_aparc:visible=1" \
        "$sub_path/${session}/derivatives/${subj}/surf/rh.pial:annot=aparc.annot:name=pial_aparc:visible=1" \
        -cmd "$cmd_file"
    echo "Creating montage"
    magick montage "$output_dir/"parc-3d-*.png -tile 3x2 -geometry +0+0 "$output_dir/montage-3d.png"
    # Keep only the montage; drop the individual view images.
    rm "$output_dir/"parc-3d-*.png
}
process_visualization() {
    # Build both QA montages (2-D slices and 3-D surface views) for one
    # subject/session pair.
    # Reads globals: SUBJECTS_DIR and FREESURFER_OUT (set by the caller).
    local subject_id=$1
    local sess=$2
    local subject_path="${SUBJECTS_DIR}/${subject_id}"
    local qa_dir="${FREESURFER_OUT}/qa-output"
    echo "Processing $subject_id"
    # mkdir -p is a no-op when the directory already exists.
    mkdir -p "$qa_dir"
    echo "Creating 2d slices"
    create_2d_slices "$subject_id" "$subject_path" "$sess" "$qa_dir"
    echo "Creating 3d slices"
    create_3d_slices "$subject_id" "$subject_path" "$sess" "$qa_dir"
    # Final confirmation for this subject.
    echo "QA images for $subject_id printed"
}
# If there are T1s that haven't been processed, offer to run the
# cross-sectional pipeline over them.
if [ ${#t1s_not_processed[@]} -gt 0 ]; then
    # Fix: read -r so the typed answer is taken literally.
    read -r -p "Do you want to process these images through the cross-sectional pipeline? (yes/no): " PROCEED_CHOICE
    if [[ $PROCEED_CHOICE == "yes" ]]; then
        echo "Starting the cross-sectional pipeline..."
        for t1_path in "${t1s_not_processed[@]}"; do
            # Recover the subject/session labels from the BIDS path.
            subj=$(echo "$t1_path" | grep -Eo 'sub-[a-zA-Z0-9]+' | head -n1)
            session=$(echo "$t1_path" | grep -Eo 'ses-[a-zA-Z0-9]+' | head -n1)
            FREESURFER_OUT="${SUBJECTS_DIR}/${subj}/${session}/derivatives"
            if [ -f "$t1_path" ]; then
                # NOTE(review): recon-all/segmentHA are commented out upstream,
                # so currently only the QA visualization runs here.
                #recon-all -i "$t1_path" -s "$subj" -all -qcache -sd "$FREESURFER_OUT"
                #segmentHA_T1.sh "$subj"
                process_visualization "$subj" "$session"
            else
                echo "T1 file not found for $subj"
            fi
        done
    fi
fi
# If there are T1s without montages, offer to (re)generate the QA montages.
if [ ${#t1s_without_montage[@]} -gt 0 ]; then
    # Fix: read -r so the typed answer is taken literally.
    read -r -p "Do you want to generate montages for these images? (yes/no): " MONTAGE_CHOICE
    if [[ $MONTAGE_CHOICE == "yes" ]]; then
        echo "Generating montages..."
        for t1_path in "${t1s_without_montage[@]}"; do
            # Recover the subject/session labels from the BIDS path.
            subj=$(echo "$t1_path" | grep -Eo 'sub-[a-zA-Z0-9]+' | head -n1)
            session=$(echo "$t1_path" | grep -Eo 'ses-[a-zA-Z0-9]+' | head -n1)
            FREESURFER_OUT="${SUBJECTS_DIR}/${subj}/${session}/derivatives"
            process_visualization "$subj" "$session"
        done
    fi
fi
# Creating the tables with all the cross-sectional data.
export SUBJECTS_DIR="$SUBJECTS_DIR"
# Folder that collects every per-measure CSV produced below.
measures_folder="${SUBJECTS_DIR}/measures"
mkdir -p "$measures_folder"   # fix: -p makes this idempotent (no test needed)
# Remember the dataset root: the longitudinal loop rebinds SUBJECTS_DIR
# to each subject's directory, and later stats extraction needs the root.
BASE_SUBJECTS_DIR="$SUBJECTS_DIR"
# Check if the user wants to proceed with the longitudinal pipeline.
# Fix: read -r so the typed answer is taken literally.
read -r -p "Do you want to process through the longitudinal pipeline? (yes/no): " LONGITUDINAL_CHOICE
if [[ $LONGITUDINAL_CHOICE == "yes" ]]; then
    # Iterate over each subject directory in the dataset root.
    for subject_dir in "$BASE_SUBJECTS_DIR"/sub-*; do
        if [ -d "$subject_dir" ]; then
            # Point SUBJECTS_DIR at the current subject for the helper script.
            SUBJECTS_DIR="$subject_dir"
            echo "Processing subject: $subject_dir"
            echo "Starting the longitudinal pipeline..."
            python3 process_longitudinal.py "$SUBJECTS_DIR"
        fi
    done
fi
# Extract the longitudinal data into csv files and merge with the
# cross-sectional data.
echo "Extracting longitudinal data into csv files..."
# These variables hold glob PATTERNS. They are expanded on purpose when
# passed unquoted to --subjects below: each matching derivatives directory
# becomes one subject argument. Do not quote them at the call sites.
list="${BASE_SUBJECTS_DIR}/*/*/derivatives/*"
list_long="${BASE_SUBJECTS_DIR}/*/*/derivatives/longitudinal/*"
measures_folder="${BASE_SUBJECTS_DIR}/measures"
# --- Cross-sectional tables ---
asegstats2table --subjects $list --meas volume --skip --statsfile wmparc.stats --all-segs --tablefile "$measures_folder/wmparc_stats.csv"
asegstats2table --subjects $list --meas volume --skip --statsfile aseg.stats --all-segs --tablefile "$measures_folder/aseg_stats.csv"
aparcstats2table --subjects $list --hemi lh --meas volume --skip --tablefile "$measures_folder/aparc_volume_lh.csv"
aparcstats2table --subjects $list --hemi lh --meas thickness --skip --tablefile "$measures_folder/aparc_thickness_lh.csv"
aparcstats2table --subjects $list --hemi rh --meas volume --skip --tablefile "$measures_folder/aparc_volume_rh.csv"
aparcstats2table --subjects $list --hemi rh --meas thickness --skip --tablefile "$measures_folder/aparc_thickness_rh.csv"
asegstats2table --subjects $list --skip --statsfile=hipposubfields.lh.T1.v21.stats --tablefile="$measures_folder/hipposubfields_lh.csv"
asegstats2table --subjects $list --skip --statsfile=hipposubfields.rh.T1.v21.stats --tablefile="$measures_folder/hipposubfields_rh.csv"
asegstats2table --subjects $list --skip --statsfile=amygdalar-nuclei.lh.T1.v21.stats --tablefile="$measures_folder/amyg_nuclei_lh.csv"
# Fix: FreeSurfer's segmentHA writes 'amygdalar-nuclei.rh.T1.v21.stats';
# the previous 'amygdala-nuclei' spelling matched no stats file, so the
# right-hemisphere tables were silently empty.
asegstats2table --subjects $list --skip --statsfile=amygdalar-nuclei.rh.T1.v21.stats --tablefile="$measures_folder/amyg_nuclei_rh.csv"
# --- Longitudinal tables (same measures, longitudinal derivatives) ---
asegstats2table --subjects $list_long --meas volume --skip --statsfile wmparc.stats --all-segs --tablefile "$measures_folder/long_wmparc_stats.csv"
asegstats2table --subjects $list_long --meas volume --skip --statsfile aseg.stats --all-segs --tablefile "$measures_folder/long_aseg_stats.csv"
aparcstats2table --subjects $list_long --hemi lh --meas volume --skip --tablefile "$measures_folder/long_aparc_volume_lh.csv"
aparcstats2table --subjects $list_long --hemi lh --meas thickness --skip --tablefile "$measures_folder/long_aparc_thickness_lh.csv"
aparcstats2table --subjects $list_long --hemi rh --meas volume --skip --tablefile "$measures_folder/long_aparc_volume_rh.csv"
aparcstats2table --subjects $list_long --hemi rh --meas thickness --skip --tablefile "$measures_folder/long_aparc_thickness_rh.csv"
asegstats2table --subjects $list_long --skip --statsfile=hipposubfields.lh.T1.v21.stats --tablefile="$measures_folder/long_hipposubfields_lh.csv"
asegstats2table --subjects $list_long --skip --statsfile=hipposubfields.rh.T1.v21.stats --tablefile="$measures_folder/long_hipposubfields_rh.csv"
asegstats2table --subjects $list_long --skip --statsfile=amygdalar-nuclei.lh.T1.v21.stats --tablefile="$measures_folder/long_amyg_nuclei_lh.csv"
# Fix: same 'amygdalar-nuclei' spelling correction as above.
asegstats2table --subjects $list_long --skip --statsfile=amygdalar-nuclei.rh.T1.v21.stats --tablefile="$measures_folder/long_amyg_nuclei_rh.csv"
echo "Done extracting longitudinal data into csv files."
# Post-process every generated CSV.
# Fix: NUL-delimited find instead of word-splitting $(find ...), so paths
# with spaces survive.
while IFS= read -r -d '' csv_file; do
    python3 process_csv.py "$csv_file"
done < <(find "$measures_folder" -type f -name "*.csv" -print0)
# Merge all per-measure CSVs by sub and ses into all_measures.csv.
python3 merge_csv.py "$measures_folder"