#!/usr/bin/env bash
set -o nounset
set -o errexit
set -o pipefail
abspath() {
python -c "import os; print os.path.realpath('${1}')"
}
if [[ $# -ne 7 ]] && [[ $# -ne 8 ]]; then
cat >&2 <<-EOF
Usage: $0 <runname> <root-directory> <input-catalogue> <initial-wcs-solution> <confidence-map> <shuttermap> <wcsfit-reference-frame> [master-flat]
Argument descriptions:
* runname
The name of this run. Using a run name allows multiple runs to be performed on
different data while keeping their outputs separate.
* root-directory
This is the location of the input files. The directory structure must be as follows:
root-directory
OriginalData
images
<one directory per date>
action<number>_<optional description>
IMAGE*.fits.bz2
* input-catalogue
The list of coordinates to place apertures at
* initial-wcs-solution
The initial WCS solution computed by Tom's MCMC code, containing the distortion
parameters
* confidence-map
The confidence map passed to the aperture photometry stage
* shuttermap
The shutter map applied during the reduction steps
* wcsfit-reference-frame
The reference frame passed to the photometry stage for WCS fitting
* master-flat
Custom master flat to use, overriding the flat computed from the supplied data
EOF
exit 1
fi
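
# Example invocation (illustrative only; the paths below are hypothetical and
# should be replaced with real files for an actual run):
#   ./ZLP_pipeline.sh run1 /data/zlp input-catalogue.fits wcs-solution.fits \
#       confidence-map.fits shuttermap.fits wcsfit-reference.fits
# An optional custom master flat can be appended as an eighth argument.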
# command line arguments
readonly BASEDIR=$(abspath $(dirname $0))
readonly RUNNAME=${1}
readonly WORKINGDIR=$(abspath ${2})
readonly GIVEN_INPUTCATALOGUE=$3
readonly WCSSOLUTION=$4
readonly CONFMAP=$5
readonly SHUTTERMAP=$6
readonly WCSFIT_REFERENCE_FRAME=$7
MASTER_FLAT=${8:-}
if [[ ! -z "${MASTER_FLAT}" ]]; then
MASTER_FLAT="$(abspath "${MASTER_FLAT}")"
fi
readonly IMGDIRS=${WORKINGDIR}/OriginalData/images/**/*
readonly SCRIPTDIR=${BASEDIR}/scripts
readonly BIASLIST=${WORKINGDIR}/OriginalData/output/${RUNNAME}_bias.list
readonly DARKLIST=${WORKINGDIR}/OriginalData/output/${RUNNAME}_dark.list
readonly FLATLIST=${WORKINGDIR}/OriginalData/output/${RUNNAME}_flat.list
readonly CORES=$(python -c "import multiprocessing; print(multiprocessing.cpu_count())")
readonly APSIZE=3
echo "Using ${CORES} cores"
# Which tasks to run. Set to "1" if the task should be run, otherwise "0".
readonly T1="1" # create input lists, default: 1
readonly T2="1" # create masterbias, default: 1
readonly T3="1" # create masterdark, default: 1
readonly T4="1" # copy temporary shutter map, default: 1
readonly T5="1" # create masterflat, default: 1
readonly T6="1" # reduce science images, default: 1
readonly T7="1" # perform photometry, default: 1
readonly T8="0" # run image subtraction, default: 0
readonly T9="1" # detrend, default: 1
readonly T10="1" # detrend with lightcurves, default: 1
readonly T11="1" # Make qa plots, default: 1
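
# The toggles above allow parts of the pipeline to be skipped. For example, to
# redo only photometry and detrending on an already-reduced data set, one might
# set T1-T6 to "0" and leave the remaining toggles at their defaults
# (illustrative; adjust to the stages that actually need re-running).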
# Zero Level Pipeline
# All of the pipeline commands are defined below.
# This script can be run from the command line to execute the whole pipeline.
create_input_lists() {
echo "Create lists with Images"
CMD="python ${SCRIPTDIR}/createlists.py \"$IMGDIRS\" IMAGE bz2 $RUNNAME"
echo $CMD
${CMD}
}
create_master_bias() {
# Create MasterBias
echo "Create MasterBias"
CMD="python ${SCRIPTDIR}/zlp-reduction/bin/pipebias.py $BIASLIST ${RUNNAME}_MasterBias.fits ${WORKINGDIR}/Reduction/output/${RUNNAME}"
echo ${CMD}
${CMD}
}
create_master_dark() {
# Create MasterDark
echo "Create MasterDark"
CMD="python ${SCRIPTDIR}/zlp-reduction/bin/pipedark.py $DARKLIST ${RUNNAME}_MasterBias.fits ${RUNNAME}_MasterDark.fits ${WORKINGDIR}/Reduction/output/${RUNNAME}"
echo ${CMD}
${CMD}
}
copy_temporary_shuttermap() {
echo "Copying temporary shutter map"
# NOTE: the shutter map is not yet created automatically; it is copied in
# manually for now.
DEST=${WORKINGDIR}/Reduction/output/${RUNNAME}
ensure_directory "${DEST}"
cp ${SHUTTERMAP} "${DEST}/$(basename ${SHUTTERMAP})"
}
create_master_flat() {
# Create MasterFlat
echo "Create MasterFlat"
CMD="python ${SCRIPTDIR}/zlp-reduction/bin/pipeflat.py $FLATLIST ${RUNNAME}_MasterBias.fits ${RUNNAME}_MasterDark.fits $SHUTTERMAP ${RUNNAME}_MasterFlat.fits ${WORKINGDIR}/Reduction/output/${RUNNAME}"
echo ${CMD}
${CMD}
}
copy_custom_master_flat() {
local MFLAT_DEST=${WORKINGDIR}/Reduction/output/${RUNNAME}/${RUNNAME}_MasterFlat.fits
echo "Copying custom master flat ${MASTER_FLAT} => ${MFLAT_DEST}"
cp "${MASTER_FLAT}" "${MFLAT_DEST}"
}
reduce_images() {
# Helper function to reduce a list of image lists.
# Each image list is processed in turn by running pipered.py synchronously;
# the outputs go to per-list subdirectories under Reduction/output/${RUNNAME}.
IMAGELISTS="${1}"
counter="0"
for IMAGELIST in ${IMAGELISTS}
do
IMAGELIST=${IMAGELIST#${WORKINGDIR}/OriginalData/output/}
ensure_directory ${WORKINGDIR}/Reduction/output/${RUNNAME}/${IMAGELIST%.*}
CMD="python ${SCRIPTDIR}/zlp-reduction/bin/pipered.py ${WORKINGDIR}/OriginalData/output/$IMAGELIST ${RUNNAME}_MasterBias.fits ${RUNNAME}_MasterDark.fits $SHUTTERMAP ${RUNNAME}_MasterFlat.fits ${WORKINGDIR}/Reduction/output/${RUNNAME} ${WORKINGDIR}/Reduction/output/${RUNNAME}/${IMAGELIST%.*}"
echo ${CMD}
${CMD}
done
}
any_filelists() {
local -r IMAGELISTS=$1
ls ${IMAGELISTS} 2>/dev/null >/dev/null
}
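
# any_filelists returns success if the glob passed in matches at least one file,
# and failure otherwise; the listing itself is discarded. A sketch of its use
# (hypothetical pattern):
#   if any_filelists ${WORKINGDIR}/OriginalData/output/${RUNNAME}_image_*.list; then
#       echo "found image lists"
#   fi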
reduce_science_images() {
echo "Reduce Science Images"
IMAGELISTS=$WORKINGDIR/OriginalData/output/${RUNNAME}_image_*.list
if any_filelists ${IMAGELISTS}; then
reduce_images "${IMAGELISTS}"
fi
}
wait_for_jobs() {
if [ -z "$1" ]
then
echo "Error in invokation; wait_for_jobs <jobids>" >&2
exit 1
fi
JOBIDS="${1}"
echo "Wait until jobs '${JOBIDS}' finish"
qsub -hold_jid "${JOBIDS}" -N WAIT -sync y -cwd ${WORKINGDIR}/wait.sh
}
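
# wait_for_jobs is a Grid Engine helper: it submits a trivial holding job
# (qsub -hold_jid -sync y) that blocks until the given jobs finish. It does not
# appear to be called in the current, synchronous version of this script.
# A sketch of its use (hypothetical job names):
#   wait_for_jobs "reduce_job1,reduce_job2"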
iterate_and_act_on_lists() {
local -r lists="$1"
local -r action="$2"
if any_filelists ${lists}; then
for fname in ${lists}; do
eval "${action} ${fname}"
done
fi
}
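
# Example (taken from perform_aperture_photometry below): run the photometry
# helper once for every image list matching the run's glob:
#   iterate_and_act_on_lists ${WORKINGDIR}/OriginalData/output/${RUNNAME}_image_*.list \
#       single_perform_aperture_photometry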
shrink_catalogue_directory() {
local -r catcachedir=$1
ls ${catcachedir} | grep cch_ | while read fname; do
local filepath=${catcachedir}/${fname}
python ${BASEDIR}/scripts/shrink_wcs_reference.py ${filepath} -o ${filepath}
done
}
single_perform_aperture_photometry() {
local filelist=$1
local -r basename=${filelist#${WORKINGDIR}/OriginalData/output/}
local -r jobname=${basename%.*}
local -r output_directory=${WORKINGDIR}/AperturePhot/output/${RUNNAME}/${jobname}
local -r image_filelist=${output_directory}/filelist.txt
ensure_directory "$output_directory"
find ${WORKINGDIR}/Reduction/output/${RUNNAME}/${jobname} -name 'proc*.fits' > ${image_filelist}
python ${SCRIPTDIR}/zlp-photometry/bin/ZLP_app_photom.py \
--confmap ${CONFMAP} \
--catfile ${GIVEN_INPUTCATALOGUE} \
--nproc ${CORES} \
--filelist ${image_filelist} \
--outdir ${output_directory} \
--dist ${WCSSOLUTION} \
--apsize ${APSIZE} \
--wcsref ${WCSFIT_REFERENCE_FRAME}
PIPELINESHA=$(extract_pipeline_sha $(dirname $0))
# Condense the photometry
python ${SCRIPTDIR}/zlp-condense/zlp_condense.py \
--output "${output_directory}/output.fits" \
--sha "${PIPELINESHA}" \
$(cat ${image_filelist} | sed 's/\.fits$/.fits.phot/')
}
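
# For each image list this produces a per-job directory
# ${WORKINGDIR}/AperturePhot/output/${RUNNAME}/<jobname>/ containing filelist.txt
# and the condensed photometry file output.fits, which the detrending steps
# below locate with find.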
perform_aperture_photometry() {
echo "Running aperture photometry"
cd ${WORKINGDIR}/AperturePhot
local -r filelists=${WORKINGDIR}/OriginalData/output/${RUNNAME}_image_*.list
iterate_and_act_on_lists ${filelists} single_perform_aperture_photometry
}
run_detrending() {
SYSREM=sysrem
if hash ${SYSREM} 2>/dev/null; then
echo "Detrending with SYSREM"
local -r photomfile=$(find ${WORKINGDIR}/AperturePhot/output -name 'output.fits')
if [ ! -z "${photomfile}" ]; then
local -r output_directory=$(dirname $photomfile)
local -r outfile=${output_directory}/tamout.fits
echo "Running sysrem to create ${outfile}"
if [ -z ${NOSYSREM:-} ]; then
${SYSREM} ${photomfile} ${outfile}
python ${BASEDIR}/scripts/combine_with_sysrem.py -v -p ${photomfile} -t ${outfile}
else
echo '*** sysrem has been disabled with the environment variable NOSYSREM; unset it to run sysrem'
fi
else
echo "Cannot find photometry output files" >&2
fi
else
echo "Cannot find sysrem binary ${SYSREM}" >&2
fi
}
# if [ "$T11" = "1" ] ; then
# # Subtract Images
# echo "Subtract Science Images"
# cd /ngts/pipedev/Subtractphot
# for IMAGEFILE in $WORKINGDIR/OriginalData/output/${RUNNAME}_image_*.list
# do
# IMAGEFILE=${IMAGEFILE#${WORKINGDIR}}
# IMAGEFILE=${IMAGEFILE#/OriginalData/output/}
# IMAGEFILE=${IMAGEFILE%.*}
# echo $IMAGEFILE
# $IMFILE = ${IMAGEFILE#${RUNNAME}_image_}
# numsfiles=($WORKINGDIR/InputCatalogue/output/*/*${IMFILE}/outstack.fits)
# numfiles=${#numfiles[@]}
# if [ "$numfiles" > "1" ] ; then
# echo "${numfiles} reference Frames found, selecting the first: ${numsfiles[0]}"
# fi
# $Reference = ${numsfiles[0]}
# $RefCat = ($WORKINGDIR/InputCatalogue/output/*/*${IMFILE}/catfile.fits)
# $RefCat = ${RefCat[0]}
# find ${WORKINGDIR}/Reduction/${RUNNAME}/${IMAGEFILE} -name '*.fits' > ${WORKINGDIR}/Subtractphot/${IMAGEFILE}.txt
# python /ngts/pipedev/Subtractphot/scripts/image_subtraction_main.py /ngts/pipedev/Subtractphot $Reference ${WORKINGDIR}/Subtractphot/${IMAGEFILE}.txt ${RefCat} 5 20 ${RUNNAME}
# done
# cd /ngts/pipedev
# fi
# Some helper functions
ensure_directory() {
DIR=${1}
test -d ${DIR} || mkdir -p ${DIR}
}
setup_environment() {
if [ -z ${DISABLE_ANACONDA:-} ]; then
# Allow the user to override the anaconda path variable
if [ -z ${ANACONDA_PATH:-} ]; then
# If anaconda is available, use it
case `hostname` in
ngtshead*)
ANACONDA_PATH=/home/sw/anaconda
;;
*)
ANACONDA_PATH=${HOME}/anaconda
;;
esac
fi
PARANAL_ANACONDA_PATH=/usr/local/anaconda
if [[ -d ${ANACONDA_PATH} ]]; then
export PATH=${PARANAL_ANACONDA_PATH}/bin:${ANACONDA_PATH}/bin:${PATH}
fi
fi
echo "Using python: $(which python)"
set +o nounset
export PATH=/usr/local/pipeline/bin:${PATH}
export PYTHONPATH=${BASEDIR}/scripts/zlp-photometry:${BASEDIR}/scripts:${BASEDIR}/scripts/zlp-input-catalogue:$PYTHONPATH
case "$(hostname -s)" in
ngts*)
export IERS_DATA=/usr/local/pipeline/data
export JPLEPH_DATA=${IERS_DATA}/linux_p1550p2650.430t
;;
mbp*)
export IERS_DATA=${HOME}/.local/data
export JPLEPH_DATA=${IERS_DATA}/linux_p1550p2650.430t
;;
esac
if [ ! -z "${IERS_DATA:-}" ]; then
echo "IERS data path: ${IERS_DATA}"
fi
if [ ! -z "${JPLEPH_DATA:-}" ]; then
echo "JPLEPH data path: ${JPLEPH_DATA}"
fi
# LD_LIBRARY_PATH must be exported so that sysrem (a child process) can find the
# Intel runtime libraries
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}:/opt/intel/composer_xe_2013_sp1.0.080/compiler/lib/intel64
echo "Environment set up"
set -o nounset
}
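
# setup_environment can be influenced by a few environment variables:
# DISABLE_ANACONDA skips the anaconda PATH setup entirely, and ANACONDA_PATH
# overrides the default anaconda location. For example (hypothetical path):
#   ANACONDA_PATH=/opt/anaconda ./ZLP_pipeline.sh ...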
setup_directory_structure() {
for subdir in OriginalData/output AperturePhot Reduction Reduction/output; do
local dirpath=${WORKINGDIR}/${subdir}
ensure_directory ${dirpath}
done
}
run_lightcurves_detrending() {
# Aperture index for casu lightcurves:
# 0 = auto (bad)
# 1 = 1 * rcore
# 2 = sqrt(2) * rcore
# 3 = 2 * rcore
local ap_index=1
if hash lightcurves-casu 2>/dev/null; then
local -r ref=$GIVEN_INPUTCATALOGUE
local -r photomfile=$(find ${WORKINGDIR}/AperturePhot/output -name 'output.fits' -print)
if [ ! -z "${photomfile}" ]; then
local -r output_directory=$(dirname $photomfile)
local -r outfile=${output_directory}/casu-lightcurves-out.fits
local -r number_of_coefficients=2
local -r source_files_dir=${WORKINGDIR}/Reduction/output/${RUNNAME}
echo "Running casu lightcurves file to create ${outfile}"
lightcurves-casu -f ${number_of_coefficients} -a ${ap_index} -o ${outfile} -p ${ref} $(find ${source_files_dir} -name 'proc*.phot')
python ${BASEDIR}/scripts/combine_with_casu_detrended.py -v -p ${photomfile} -d ${outfile}
fi
else
echo "Cannot find CASU lightcurves binary" >&2
fi
}
generate_qa_plots() {
bash ${BASEDIR}/scripts/zlp-qa/run.sh \
${WORKINGDIR} \
${WORKINGDIR}/QualityAssessment
}
extract_pipeline_sha() {
local -r dirname="$1"
(cd $dirname && git rev-parse HEAD)
}
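
# Example: record the git SHA of the pipeline checkout containing this script,
# as done in single_perform_aperture_photometry above:
#   PIPELINESHA=$(extract_pipeline_sha $(dirname $0))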
# Main entry point: run the selected pipeline stages in order
main() {
setup_environment
setup_directory_structure
cd ${WORKINGDIR}/OriginalData
[ "$T1" = "1" ] && create_input_lists
cd ${WORKINGDIR}/Reduction
[ "$T2" = "1" ] && create_master_bias
[ "$T3" = "1" ] && create_master_dark
[ "$T4" = "1" ] && copy_temporary_shuttermap
[ "$T5" = "1" ] && create_master_flat
if [[ ! -z "${MASTER_FLAT}" ]]; then
copy_custom_master_flat
fi
[ "$T6" = "1" ] && reduce_science_images
cd ${WORKINGDIR}
[ "$T7" = "1" ] && perform_aperture_photometry
# [ "$T8" = "1" ] && perform_image_subtraction
[ "$T9" = "1" ] && run_detrending
[ "$T10" = "1" ] && run_lightcurves_detrending
[ "$T11" = "1" ] && generate_qa_plots
}
main 2>&1 | tee ${RUNNAME}.log