adam_do_masking_master.sh
#! /bin/bash -xv
#adam-does# Has 3 parts:
# #(1)# distributes sets, runs spikefinder, runs CRNitschke, creates weights
# #(2)# by-hand masking
# #(3)# incorporate regions into weights/flags and consolidate directories
#adam-call_example# ./adam_do_masking_master.sh "MACS0416-24" "W-C-RC_2010-11-04 W-J-B_2010-11-04 W-S-Z+_2010-11-04"
# ./adam_do_masking_master.sh "MACS1226+21" "W-C-IC_2010-02-12 W-C-IC_2011-01-06 W-C-RC_2006-03-04 W-C-RC_2010-02-12 W-J-B_2010-02-12 W-J-V_2010-02-12 W-S-G+_2010-04-15 W-S-I+_2010-04-15 W-S-Z+_2011-01-06"
# ./adam_do_masking_master.sh "Zw2089" "W-J-V_2007-02-13 W-J-V_2010-12-05 W-S-I+_2009-03-28 W-J-V_2010-03-12 W-S-I+_2007-02-13"
### script to run the steps for masking image sets
###
### this used to be the first part of the do_Subaru_coadd_template scripts;
### it is now a separate, standalone step
###
### $Id: do_masking.sh,v 1.3 2010-10-05 22:27:58 dapple Exp $
. progs.ini > /tmp/progs.ini.log 2>&1
. bash_functions.include > /tmp/bash_functions.include.log 2>&1
export cluster=$1 # cluster nickname as in /nfs/slac/g/ki/ki05/anja/SUBARU/SUBARU.list
filter_run_pairs=$2
#adam-call_example#cluster=MACS0416-24
#adam-call_example#filter_run_pairs=(W-C-RC_2010-11-04 W-J-B_2010-11-04 W-S-Z+_2010-11-04)
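# note: $2 is stored as a single space-separated string, not a bash array; the unquoted
# ${filter_run_pairs[@]} in the loops below word-splits it, which is why the call examples
# above quote the whole second argument as one string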
REDDIR=`pwd`
lookupfile=/nfs/slac/g/ki/ki05/anja/SUBARU/SUBARU.list
#adam# lookupfile (SUBARU.list) has list of clusters and positions
export SUBARUDIR=/nfs/slac/g/ki/ki18/anja/SUBARU
export INSTRUMENT=SUBARU
#adam# pick the ending, filter, and run
#adam-call_example#export ending="OCF" #gets this itself later on
#adam-call_example#export run="2010-11-04"
#adam-call_example#export filter="W-C-RC" #export filter="W-J-B" #export filter="W-S-Z+"
#####################################################################################################
#####################################################################################################
### #(1)# STARTING LOOP #(1)# distributes sets, runs spikefinder, runs CRNitshke, creates weights ###
#####################################################################################################
#####################################################################################################
for filter_run in ${filter_run_pairs[@]}
do
########################
### Some Setup Stuff ###
########################
export filter=`echo ${filter_run} | awk -F'_' '{print $1}'`
export run=`echo ${filter_run} | awk -F'_' '{print $2}'`
echo "run=" ${run}
echo "filter=" ${filter}
#adam# Find Ending
testfile=`ls -1 ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/SUP*_2*.fits | awk 'NR>1{exit};1'`
export ending=`basename ${testfile} | awk -F'_2' '{print $2}' | awk -F'.' '{print $1}'`
echo "ending=" ${ending}
###./BonnLogger.py clear
###export BONN_FILTER=${filter}; export BONN_TARGET=${run}
./setup_SUBARU.sh ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE
. ${INSTRUMENT:?}.ini > /tmp/subaru_ini.log 2>&1
#####################
## distribute sets ##
#####################
#adam# distribute_sets_subaru.sh: copy / link images into cluster directories
#adam# lookupfile (SUBARU.list) has list of clusters and positions
#adam# 1000 means within arcsecs of cluster coords?
#adam# this changes from run_filter directories to directories divided up by clusters
#adam# it moves run_filter to cluster/filter_run, copying over:
# SCIENCE/SUPA*_#OCF.fits
# SCIENCE/SPLIT_IMAGES/SUPA*_#.fits
# WEIGHTS/globalweight_#.fits
# WEIGHTS/globalflags_#.fits
#./distribute_sets_subaru.sh ${SUBARUDIR} ${run}_${filter}/SCIENCE ${ending} 1000 ${lookupfile}
if [ ! -d ${SUBARUDIR}/${cluster}/${filter}_${run} ]; then
echo "this directory isn't there: " ${SUBARUDIR}/${cluster}/${filter}_${run}
exit 1
fi
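# optional sanity check (assumes distribution keeps the globalweight_* naming from the list above):
#ls ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/SUPA*${ending}.fits | head -3
#ls ${SUBARUDIR}/${cluster}/${filter}_${run}/WEIGHTS/globalweight_*.fits | head -3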
export BONN_TARGET=${cluster} ; export BONN_FILTER=${filter}_${run}
##################################################################
### Capture Variables
###./BonnLogger.py config \
### cluster=${cluster} \
### filter=${filter_run} \
### config=${config} \
### ending=${ending}
#######################
### weight creation ###
#######################
#### C: Processing for each Ind Image ###
### spikefinder ###
#adam# spikefinder finds saturation spikes, satellites, and shadows from the guider cam
#adam# makes: SCIENCE/diffmask/ , e.g. diffmask/SUPA0125903_10OCF.sf.fits
#adam# this takes ~20 min
#./parallel_manager.sh spikefinder_para.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE SUP ${ending} ${filter}
./parallel_manager.sh spikefinder_para_NOsh.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE SUP ${ending} ${filter}
#adam-check# make sure that nothing is masked that shouldn't be. If something is when using spikefinder_para.sh, switch to spikefinder_para_NOsh.sh
#	spikefinder_para_NOsh.sh finds saturation spikes and satellites, and DOES NOT LOOK FOR shadows from the guider cam
#	spikefinder_para.sh finds saturation spikes, satellites, and shadows from the guider cam
#adam# ds9 ${cluster}/${filter}_${run}/SCIENCE/SUPA*OCF.fits &
#adam# ds9 ${cluster}/${filter}_${run}/SCIENCE/diffmask/SUPA*OCF.sf.fits &
#echo "#adam-check# make sure that there isn't anything masked that shouldn't be. If there is using spikefinder_para.sh, then use spikefinder_para_NOsh.sh"
#echo " spikefinder_para_NOsh.sh finds saturation spikes, sattelites, and DOES NOT LOOK FOR shadow from guider cam"
#echo " spikefinder_para.sh finds saturation spikes, sattelites, and shadow from guider cam"
#echo "ds9 ${cluster}/${filter}_${run}/SCIENCE/SUPA*OCF.fits &"
#echo "ds9 ${cluster}/${filter}_${run}/SCIENCE/diffmask/SUPA*OCF.sf.fits &"
#exit 0;
### C: CHIP PROCESSING ###
### RUN CRNITSCHKE and make the weight files ###
#adam# run CRNitschke and then make actual weight and flag fits files from globalweights, diffmask, and the region files
#adam# the CRNitschke setup script makes the CRNitschke_final_${cluster}_${filter}_${run}.txt file with the seeing, rms, and sextractor cut values needed for the CRN pipeline
./create_weights_raw_delink_para_CRNitschke_setup.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE ${ending} WEIGHTS 2>&1 | tee -a OUT-cwrdp_CRNitschke_setup_${cluster}_${filter}_${run}.log
#adam# makes: actual weights from globalweights, diffmask, and the region files. Also, all of the CRNitschke pipeline output!
#adam# makes: ~/data/MACS0416-24/W-S-Z+_2010-11-04/WEIGHTS/SUPA0100120_10OCF.flag.fits and SUPA0100120_10OCF.weight.fits
#./parallel_manager.sh ./create_weights_raw_delink_para_CRNitschke.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE ${ending} WEIGHTS 2>&1 | tee -a OUT-cwrdp_CRNitschke_${cluster}_${filter}_${run}.log
#adam-check# ds9 ~/data/MACS0416-24/W-S-Z+_2010-11-04/SUPA0125896_9OCF.weight.fits ~/data/MACS0416-24/W-S-Z+_2010-11-04/SUPA0125896_9OCF.flag.fits -regions load ~/data/MACS0416-24/W-S-Z+_2010-11-04/SCIENCE/reg/SUPA0125896_9.reg &
#adam# multiplies science by weights. these are only useful for inspection, to see what else might need to be masked
#./parallel_manager.sh ./create_science_weighted.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE WEIGHTS ${ending}
#adam# makes: SCIENCE_weighted/ directory and all of its contents, such as ~/data/A2744/W-S-I+_2008-08-01/SCIENCE_weighted/SUPA0100117_10OCF.weighted.fits
###adam# masks the radial region surrounding the edges of the FOV of the image.
##./parallel_manager.sh ./adam_apply_RADIAL_MASK_para.sh ${SUBARUDIR} ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/ ${SUBARUDIR}/${cluster}/${filter}_${run}/WEIGHTS/ ${ending}
###adam-SKIPPED###
###adam# fixes an issue with stars landing on overscan regions, which previously caused horizontal lines to be masked (not needed unless you see this issue appearing in the new data)
#### run once, at the end of the first pass through. Here you'll be able to edit the region files and adjust the masks.
###./maskBadOverscans.py ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE SUPA
###./create_binnedmosaics_empty.sh ${SUBARUDIR}/${cluster}/${filter}_${run} WEIGHTS SUP ${ending}.weight 8 -32
###./create_binnedmosaics_empty.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE_weighted SUP ${ending}.weighted 8 -32
echo "Todo: Mask images by hand for remaining defects (satelite trails, blobs, etc). Use the images in SCIENCE_weighted for masking. maskImages.pl may be useful for managing region files."
echo "For Simplicity, make sure you save region files to SCIENCE/reg (use maskImages.pl -r)"
echo "./maskImages.pl -r ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/reg/ -l toMask_${cluster}_${filter}_${run}-start.list -d ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE_weighted/ SUP"
echo "Once done with by-hand masking, Goto FINAL LOOP "
done #done looping over ${filter_run_pairs[@]}
echo "do by-hand masking for each of the filter_run_pairs=" ${filter_run_pairs[@]}
exit 0;
##############################################
##############################################
######### #(2)# BY-HAND MASKING #(2)# ########
##############################################
##############################################
#adam# add manual masks, e.g.
#adam# opens images, one after another so you can mask things by hand
#adam# maskImages.pl -l files_toMask.list -r reg_dir -d SCIENCE_weighted_dir prefix
#adam# you have to open ds9 first! (ds9 &)
#adam# help menu: maskImages.pl -h
#touch toMask_${cluster}_${filter}_${run}-start.list
ds9 -layout vertical &
./maskImages.pl -r ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/reg/ -l toMask_${cluster}_${filter}_${run}-start.list -d ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE_weighted/ SUP
#adam# if I want to check it out afterwards: ds9 ~/data/MACS0416-24/W-S-Z+_2010-11-04/SUPA0125896_9OCF.weight.fits ~/data/MACS0416-24/W-S-Z+_2010-11-04/SUPA0125896_9OCF.flag.fits -regions load ~/data/MACS0416-24/W-S-Z+_2010-11-04/SCIENCE/reg/SUPA0125896_9.reg &
###adam-SKIPPED# don't worry about it
### might want to use mark_badpixel_regions.pl, too?
exit 0;
#####################################################################################################
#####################################################################################################
### #(3)# FINAL LOOP #(3)# incorporate regions into weights/flags and consolidate directories #######
#####################################################################################################
#####################################################################################################
for filter_run in ${filter_run_pairs[@]}
do
### FINAL LOOP | 1.) make regions compatible 2.) put them in flags/weights 3.) consolidate directories ###
########################
### Some Setup Stuff ###
########################
export filter=`echo ${filter_run} | awk -F'_' '{print $1}'`
export run=`echo ${filter_run} | awk -F'_' '{print $2}'`
echo "run=" ${run}
echo "filter=" ${filter}
#Find Ending
testfile=`ls -1 ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/SUP*_2*.fits | awk 'NR>1{exit};1'`
export ending=`basename ${testfile} | awk -F'_2' '{print $2}' | awk -F'.' '{print $1}'`
echo "ending=" ${ending}
###./BonnLogger.py clear
###export BONN_FILTER=${filter}; export BONN_TARGET=${run}
./setup_SUBARU.sh ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE
. ${INSTRUMENT:?}.ini
export BONN_TARGET=${cluster} ; export BONN_FILTER=${filter}_${run}
###############################
### make regions compatible ###
###############################
#adam# makes regions readable by the rest of the pipeline (include in the final masking script): transforms the ds9 region files into ww-readable files:
#adam# converts `box` regions to `polygon`
#./convertRegion2Poly.py ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE
#adam# changes `polygon` to `POLYGON`!
#./transform_ds9_reg_alt.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE
#adam# deletes region files that are empty
#./clean_empty_regionfiles.sh ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE/reg/*.reg
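# illustrative region conversion (exact output format of convertRegion2Poly.py is assumed): an unrotated
# ds9 box such as box(1024,2048,300,40,0) covers x in [874,1174], y in [2028,2068], so it becomes roughly
#   polygon(874,2028,1174,2028,1174,2068,874,2068)   and then   POLYGON(874,2028,...) after the transform step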
########################################
### put regions in weight/flag files ###
########################################
#adam# now add these region masks to the weight/flag files
#./parallel_manager.sh add_regionmasks.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE ${ending} WEIGHTS ${filter} 2>&1 | tee -a OUT-add_regionmasks_${filter}_${cluster}_${run}.log
#adam# could re-do the science_weighted if I wanted to check out how they look
##./parallel_manager.sh create_science_weighted.sh ${SUBARUDIR}/${cluster}/${filter}_${run} SCIENCE WEIGHTS ${ending}
#########################################
### Consolidate into filter directory ###
#########################################
if [ ! -d ${SUBARUDIR}/${cluster}/${filter} ]; then
mkdir ${SUBARUDIR}/${cluster}/${filter}
mkdir ${SUBARUDIR}/${cluster}/${filter}/SCIENCE
mkdir ${SUBARUDIR}/${cluster}/${filter}/WEIGHTS
fi
cd ${SUBARUDIR}/${cluster}/${filter}/SCIENCE
ln -s ../../${filter}_${run}/SCIENCE/SUP*fits .
cd ${SUBARUDIR}/${cluster}/${filter}/WEIGHTS
ln -s ../../${filter}_${run}/WEIGHTS/SUP*fits .
cd ${REDDIR}
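# optional check (illustrative) that the new links resolve; ls -L follows symlinks and errors on broken ones:
#ls -L ${SUBARUDIR}/${cluster}/${filter}/SCIENCE/SUP*fits > /dev/null && echo "SCIENCE links OK"
#ls -L ${SUBARUDIR}/${cluster}/${filter}/WEIGHTS/SUP*fits > /dev/null && echo "WEIGHTS links OK"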
###adam-SKIPPED#
###./makePNGs.pl ${SUBARUDIR}/${cluster}/${filter}_${run}/SCIENCE_weighted/BINNED
### CHECKPOINT ###
#./BonnLogger.py checkpoint Masking
done
exit 0;