#! /bin/bash

# mlovccf (make list of valid ccfs)
#
# Makes the list of valid CCFs. These are all CCFs, among those published since the
# beginning of the project, that are required to process any ODF at a given date,
# typically the current date. This list will change with time as new
# CCFs are issued to deal with new calibration data and/or replace obsolete ones.
#
# Identifying the set of CCFs required to process a given ODF is the job of task cifbuild.
# To make such an identification, cifbuild needs only the start time of the observation.
# Therefore, if we provide a list of all observed ODFs at a given date together with their
# respective start times, we can compute all the CCFs required to process them.
# For such a moment, that list is the list of valid CCFs required to process
# any observed ODF.
#
# We need as input the list of all observed ODFs at a given date.
# This is obtained from the report of all observations that the XSA makes to CDS,
# xsaobslog.txt (ftp://nxsa.esac.esa.int/pub/cds_obslog/xsaobslog.txt).
#
# $Id: mlovccf,v 1.6 2015/03/12 16:02:52 ccflib Exp $

# Next two lines allow submitting this script to the grid
# request Bourne shell as shell for job
#$ -S /bin/bash

# Short host name (first component of the FQDN); selects the per-host
# environment initialisation below.
host=$(hostname | cut -d. -f1)

# Timestamps taken once at start-up: a compact form used in file names,
# and an ISO-like form used in the report header.
now=$(date +'%Y%m%d_%H%M')
now_table=$(date +'%Y-%m-%dT%H:%M:%S')
# find_latest_mif DIR
#
# Scans DIR for XMM_CALINDEX_*.CCF files and sets the global MIF to the path
# of the one with the highest issue number. Returns silently if DIR is empty.

find_latest_mif()
{

    [ -z "$1" ] && return

    # Keep the search path and scratch variables local; only MIF is exported
    # to the caller.
    local ccfpath="$1"
    local mif mifbase issue
    local maxissue="0"

    # Iterate over the glob directly instead of parsing `ls` output, which
    # breaks on unusual file names and spawns a needless process.
    for mif in "${ccfpath}"/XMM_CALINDEX_*.CCF
    do
        [ -e "$mif" ] || continue    # glob matched nothing
        mifbase=$(basename "$mif")
        # File name layout is XMM_CALINDEX_<issue>.CCF: strip the extension,
        # then take the third '_'-separated field.
        issue=$(echo "$mifbase" | awk -F"." '{print $1}' | awk -F"_" '{print $3}')
        # test(1) compares integers in decimal, so zero-padded issues are safe.
        [ "$issue" -ge "$maxissue" ] && maxissue="$issue"
    done

    # maxissue keeps the zero padding from the file name, so this rebuilds
    # the exact file name of the latest master index file.
    MIF=${ccfpath}/XMM_CALINDEX_${maxissue}.CCF

}
# get_ccf_list CIF
#
# Lists on stdout the SCOPE/TYPEID/ISSUE table of the CCFs referenced by the
# given Calibration Index File.
#
# Requires that Heasoft is initialised (fdump available); otherwise exits.

get_ccf_list()
{
    [ -z "$1" ] && return

    local cif_to_process="$1"

    # `which fversion | grep -c no` is fragile: many `which` implementations
    # print nothing on failure, so the check passes even when Heasoft is
    # missing. `command -v` is the portable presence test.
    if ! command -v fversion > /dev/null 2>&1 ; then
        # Error goes to stderr so it never pollutes the redirected CCF list.
        echo "Error: Heasoft not initialised !" >&2
        exit 1
    fi

    fdump "${cif_to_process}" prhead=no showcol=no showrow=no page=no \
          columns="SCOPE TYPEID ISSUE" \
          rows=- STDOUT
}
# We need to get the list of all observed ODFs at the current date.
# This is obtained from ftp://nxsa.esac.esa.int/pub/cds_obslog/xsaobslog.txt.

VALID_CCF="$HOME/valid_ccf"

# Everything below reads and writes in ${VALID_CCF}; abort if it is missing
# rather than littering whatever the current directory happens to be.
cd "${VALID_CCF}" || { echo "Error: cannot cd to ${VALID_CCF}" >&2 ; exit 1 ; }

# Remove any xsaobslog.txt previously downloaded (plain file: -f suffices).

[ -f "xsaobslog.txt" ] && rm -f xsaobslog.txt

# Get the latest list of ODFs available from the nXSA server; a silent
# download failure here would make the rest of the run meaningless.

if ! wget -q ftp://nxsa.esac.esa.int/pub/cds_obslog/xsaobslog.txt ; then
    echo "Error: could not download xsaobslog.txt" >&2
    exit 1
fi

# Rename xsaobslog.txt to xsaobslog_${now}.txt just to have a reference
# of which list of ODFs was used to compute the list of valid CCFs.

mv xsaobslog.txt "xsaobslog_${now}.txt"
# Sets SAS_CCFPATH and initialises HEADAS and SAS depending on host

# init_headas_sas HEADAS_DIR
# Shared Heasoft + SAS initialisation for the build and grid hosts
# (the two branches below were previously duplicated line for line).
init_headas_sas()
{
    export HEADAS="$1"
    . $HEADAS/headas-init.sh
    # To be able to mix several jobs on the same ccflib account:
    # give each host its own parameter-file directory.
    mkdir -p "${HOME}/pfiles/${host}"
    export PFILES="${HOME}/pfiles/${host};${HEADAS}/syspfiles"
    echo -n "HEADAS version: "
    fversion
    source $HOME/setsas.sh 1> /dev/null
    echo "SAS Version: "
    sasversion -V 1
    export SAS_VERBOSITY=0
}

case "$host" in
    xvsoc01|xmm)
        export SAS_CCFPATH="/data/xmm/ccflib/ftp-area/constituents"
        export SAS_DIR=/data/xmm/ccflib/sas
        export SAS_PATH=$SAS_DIR
        source $SAS_DIR/sas-setup.sh
        ;;
    sasbld01|sasbld02)
        export SAS_CCFPATH="/home/ccflib/ftp-area/constituents"
        /sas/bin/confsas
        # Build hosts have their own Heasoft installation.
        init_headas_sas /sasbuild/local/${host}/headas/architecture
        ;;
    scigrid6|cn-*)
        export SAS_CCFPATH="/home/ccflib/ftp-area/constituents"
        /sas/bin/confsas
        # Grid nodes share the sasbld02 Heasoft installation.
        init_headas_sas /sasbuild/local/sasbld02/headas/architecture
        ;;
    *) echo "Error: Do not know how to do it in host $host" >&2 ; exit 1 ;;
esac
# Finds the latest MIF issue

find_latest_mif "${SAS_CCFPATH}"


# Now scans the list of ODF IDs registered in the previous list to get the
# start time and then runs cifbuild to get the respective CIF

# Output file: must exist so the merge step below always has a file to read.
touch "${VALID_CCF}/all_ccfs_${now}.txt"

n=0

# xsaobslog.txt is a '|'-separated table: field 2 is the OBSID and
# field 9 is the observation start time ("YYYY-MM-DD hh:mm:ss").
while read -r line
do
    obsid=$(echo "$line" | awk -F"|" '{print $2}')
    stime=$(echo "$line" | awk -F"|" '{print $9}')
    stime=$(echo "$stime" | tr " " "T")    # ISO form: date and time joined by 'T'
    n=$((n+1))
    echo "($n) Processing $obsid $stime" ; echo

    # Run cifbuild: the observation start time alone determines the set of
    # CCFs required to process this ODF.

    cif_file=${VALID_CCF}/${obsid}.cif

    cifbuild --withmasterindexset='yes' \
             --masterindexset="${MIF}" \
             --withobservationdate='yes' \
             --observationdate="${stime}" \
             --analysisdate='now' \
             --calindexset="${cif_file}"


    # Gets the list of CCFs included in the CIF

    get_ccf_list "${cif_file}" > "${VALID_CCF}/${obsid}_ccfs.tmp"

    # Turn each "SCOPE TYPEID ISSUE" row into a CCF file name CLASS_IIII.CCF.
    # (Distinct loop variable: do not clobber the outer loop's `line`.)
    while read -r ccfrow
    do
        [ "$ccfrow" = "" ] && continue
        class=$(echo "$ccfrow" | awk -F" " '{print $1"_"$2}')
        issue=$(echo "$ccfrow" | awk -F" " '{print $3}')
        # 10# forces decimal: a zero-padded issue would otherwise be
        # interpreted as octal by printf %d and fail on digits 8/9.
        issue=$(printf "%04d" $((10#$issue)))
        echo "${class}_${issue}.CCF" >> "${VALID_CCF}/${obsid}_ccfs.txt"

    done < "${VALID_CCF}/${obsid}_ccfs.tmp"

    rm "${VALID_CCF}/${obsid}_ccfs.tmp"

    # Merge this observation's CCFs into the cumulative list, keeping it
    # sorted and duplicate-free.
    mv "${VALID_CCF}/all_ccfs_${now}.txt" "${VALID_CCF}/all_ccfs_${now}.tmp"

    cat "${VALID_CCF}/${obsid}_ccfs.txt" "${VALID_CCF}/all_ccfs_${now}.tmp" | sort -u > "${VALID_CCF}/all_ccfs_${now}.txt"

    rm "${cif_file}"
    rm "${VALID_CCF}/${obsid}_ccfs.txt"
    rm "${VALID_CCF}/all_ccfs_${now}.tmp"

done < "${VALID_CCF}/xsaobslog_${now}.txt"
# Make a table of classes and issues like in the "Pipeline Release Notes"

# Lists all CCF classes: the class is the first two '_'-separated fields
# of the CCF file name (SCOPE_TYPEID).

ccf_classes="${VALID_CCF}/ccf_classes.txt"

# Start from scratch: a leftover file from an aborted previous run would
# otherwise be appended to and pollute the table.
[ -f "${ccf_classes}" ] && rm "${ccf_classes}"

# A single awk pass over the whole list replaces the former per-line shell
# loop (one awk fork per line), and sort -u deduplicates the classes.
awk -F"_" '{print $1"_"$2}' "${VALID_CCF}/all_ccfs_${now}.txt" | sort -u > "${ccf_classes}"
# Output final file is named all_ccfs_${now}_table.txt

total_number_of_ccfs=0

all_ccfs_table="${VALID_CCF}/all_ccfs_${now}_table.txt"

[ -f "${all_ccfs_table}" ] && rm "${all_ccfs_table}"

echo "Table of valid CCFs at $now_table" >> "${all_ccfs_table}"
echo >> "${all_ccfs_table}"
echo "|================================|=============|=======|" >> "${all_ccfs_table}"
echo "| Calibration File               | Issue range | Count |" >> "${all_ccfs_table}"
echo "|================================|=============|=======|" >> "${all_ccfs_table}"

while read -r ccf_class
do
    echo -n "Processing class $ccf_class ..."
    issues_file="${VALID_CCF}/ccf_issues_for_${ccf_class}.txt"
    [ -f "${issues_file}" ] && rm "${issues_file}"

    # Collect every issue seen for this class (CLASS_ISSUE.CCF -> ISSUE).
    while read -r ccf_name
    do
        class=$(echo "$ccf_name" | awk -F"_" '{print $1"_"$2}')
        [ "$class" != "$ccf_class" ] && continue
        issue=$(echo "$ccf_name" | awk -F"_" '{print $3}' | cut -d. -f1)
        echo "$issue" >> "${issues_file}"
    done < "${VALID_CCF}/all_ccfs_${now}.txt"

    # Deduplicate; scratch file kept inside ${VALID_CCF} instead of bare cwd.
    sort -u "${issues_file}" > "${VALID_CCF}/kk.txt"
    mv "${VALID_CCF}/kk.txt" "${issues_file}"
    real_number_of_issues_for_this_class=$(wc -l "${issues_file}" | awk -F" " '{print $1}')

    # Issues are zero-padded and the file is sorted, so head/tail give the
    # lowest and highest issue of the class.
    first_issue=$(head -1 "${issues_file}")
    last_issue=$(tail -1 "${issues_file}")

    # 10# forces decimal arithmetic on the zero-padded issue strings.
    # (Also avoid `fi` as a variable name — it shadows the shell keyword
    # in the reader's mind even though bash accepts it.)
    last_num=$((10#$last_issue))
    first_num=$((10#$first_issue))

    # Fewer distinct issues than the span of the range means some
    # intermediate issues are not required any more: flag with (*).
    sequential_number_of_issues_for_this_class=$((last_num - first_num + 1))
    if [ "${real_number_of_issues_for_this_class}" -lt "${sequential_number_of_issues_for_this_class}" ] ; then
        asterisk="(*)"
    else
        asterisk="   "
    fi
    total_number_of_ccfs=$((real_number_of_issues_for_this_class + total_number_of_ccfs))

    rn=$real_number_of_issues_for_this_class

    # The printf arguments MUST be quoted: an unquoted blank ${asterisk}
    # expands to zero words and shifts the remaining arguments one column
    # to the left, corrupting the row.
    if [ "${first_issue}" != "${last_issue}" ] ; then
        printf "| %-30s | %4s-%4s%3s| %4s |\n" "${ccf_class}" "${first_issue}" "${last_issue}" "${asterisk}" "${rn}" >> "${all_ccfs_table}"
    else
        printf "| %-30s |    %4s     | %4s |\n" "${ccf_class}" "${first_issue}" "${rn}" >> "${all_ccfs_table}"
    fi

    echo "done"
done < "${ccf_classes}"

echo "|================================|=============|=======|" >> "${all_ccfs_table}"
echo >> "${all_ccfs_table}"
echo "(*): Incomplete range." >> "${all_ccfs_table}"
echo "" >> "${all_ccfs_table}"
echo "Total number of valid CCFs required : ${total_number_of_ccfs}" >> "${all_ccfs_table}"
rm "${VALID_CCF}/ccf_classes.txt"
rm "${VALID_CCF}"/ccf_issues_for*.txt