cta_root2caldb.py
1 |
#! /usr/bin/env python
|
---|---|
2 |
# ==========================================================================
|
3 |
# This script generates IRFs in the CALDB format of HEASARC using ROOT 2D
|
4 |
# performance files.
|
5 |
# -------------------------------------------------------------------------
|
6 |
# Copyright (C) 2011-2012 Juergen Knoedlseder
|
7 |
# -------------------------------------------------------------------------
|
8 |
#
|
9 |
# This program is free software: you can redistribute it and/or modify
|
10 |
# it under the terms of the GNU General Public License as published by
|
11 |
# the Free Software Foundation, either version 3 of the License, or
|
12 |
# (at your option) any later version.
|
13 |
#
|
14 |
# This program is distributed in the hope that it will be useful,
|
15 |
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
16 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
17 |
# GNU General Public License for more details.
|
18 |
#
|
19 |
# You should have received a copy of the GNU General Public License
|
20 |
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
21 |
#
|
22 |
# -------------------------------------------------------------------------
|
23 |
# Requirements:
|
24 |
# - gammalib
|
25 |
# - pyroot
|
26 |
#
|
27 |
# ==========================================================================
|
28 |
from ROOT import TFile, TH1F, TH2F |
29 |
from gammalib import * |
30 |
from datetime import datetime |
31 |
import math |
32 |
import sys |
33 |
import glob |
34 |
import os |
35 |
|
36 |
|
37 |
# =========== #
|
38 |
# CalDB class #
|
39 |
# =========== #
|
40 |
class caldb():
    """
    Generate a CALDB entry for one CTA observation.

    The class creates the HEASARC CALDB directory structure for a given
    instrument and observation identifier, maintains the Calibration
    Index File (CIF), and fills effective area, point spread function,
    energy dispersion and background extensions from ROOT 2D
    performance histograms.
    """
    def __init__(self, inst, obsid, path=""):
        """
        CALDB constructor. The constructor will create one CALDB entry for
        an observation.

        Parameters:
         inst  - Instrument string.
         obsid - Observation ID string.
        Keywords:
         path  - CALDB Root path.
        """
        # Store root path
        self.path = path

        # Initialise some members
        self.cif     = None
        self.irf     = None
        self.ea      = None
        self.psf     = None
        self.edisp   = None
        self.bgd     = None
        self.hdu_cif = None

        # Initialise response extension HDUs. They are set by the
        # open_*() methods; initialising them here makes the root2*()
        # methods well-defined no-ops when open() was never called.
        self.hdu_ea    = None
        self.hdu_psf   = None
        self.hdu_edisp = None
        self.hdu_bgd   = None

        # Store UTC date of job execution (YYYY-MM-DDThh:mm:ss)
        self.utc = datetime.utcnow().isoformat()[:19]

        # Set CALDB information
        self.cal_tel      = "CTA"
        self.cal_inst     = inst.upper()
        self.cal_obsid    = obsid
        self.cal_det      = "NONE"
        self.cal_flt      = "NONE"
        self.cal_class    = "BCF"
        self.cal_type     = "DATA"
        self.cal_qual     = 0            # 0=good, 1=bad, 2=dubious, ...
        self.cal_date     = "11/01/01"
        self.val_date     = "2011-01-01"
        self.val_time     = "00:00:00"
        self.ref_time     = 51544.0
        self.cal_version  = "VERSION(DC1)"
        self.cal_cut      = "CLASS(BEST)"
        self.cal_analysis = "ANALYSIS(IFAE)"

        # Effective area information
        self.ea_name   = "EFF_AREA"
        self.ea_doc    = "CAL/GEN/92-019"
        self.ea_bounds = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.ea_desc   = "CTA effective area"

        # Point spread function information
        self.psf_name   = "RPSF"
        self.psf_doc    = "CAL/GEN/92-020"
        self.psf_bounds = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.psf_desc   = "CTA point spread function"

        # Energy dispersion information
        self.edisp_name   = "EDISP"
        self.edisp_doc    = "???"
        self.edisp_bounds = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.edisp_desc   = "CTA energy dispersion"

        # Background information
        self.bgd_name   = "BGD"
        self.bgd_doc    = "???"
        self.bgd_bounds = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.bgd_desc   = "CTA background"

        # Create directory structure
        self.make_dirs()

        # Return
        return

    def __del__(self):
        """
        Destructor. Close all FITS files.
        """
        # Close calibration
        self.close()

        # Return
        return

    def make_dirs(self):
        """
        Generate CALDB directory structure for one observation identifier.
        The structure is given by

            data/<tel>/<inst>/bcf/<obsid>

        where <tel> is "cta" and <inst> is the instrument specified in the
        CALDB constructor (the instrument may be used for different array
        configurations).

        Parameters:
         None
        Keywords:
         None
        """
        # Set base path
        self.base_dir  = "data"
        self.base_dir += "/"+self.cal_tel.lower()
        self.base_dir += "/"+self.cal_inst.lower()

        # Set directory names
        self.bcf_dir   = self.base_dir+"/bcf"
        self.obs_dir   = self.bcf_dir+"/"+self.cal_obsid
        self.ea_dir    = self.obs_dir
        self.psf_dir   = self.obs_dir
        self.edisp_dir = self.obs_dir
        self.bgd_dir   = self.obs_dir

        # Optionally prefix path
        if len(self.path) > 0:
            self.base_path  = self.path+"/"+self.base_dir
            self.bcf_path   = self.path+"/"+self.bcf_dir
            self.obs_path   = self.path+"/"+self.obs_dir
            self.ea_path    = self.path+"/"+self.ea_dir
            self.psf_path   = self.path+"/"+self.psf_dir
            self.edisp_path = self.path+"/"+self.edisp_dir
            self.bgd_path   = self.path+"/"+self.bgd_dir
        else:
            self.base_path  = self.base_dir
            self.bcf_path   = self.bcf_dir
            self.obs_path   = self.obs_dir
            self.ea_path    = self.ea_dir
            self.psf_path   = self.psf_dir
            self.edisp_path = self.edisp_dir
            self.bgd_path   = self.bgd_dir

        # Create directory structure (all components share obs_dir, but
        # each is checked separately in case the setup above changes)
        if not os.path.isdir(self.ea_path):
            os.makedirs(self.ea_path)
        if not os.path.isdir(self.psf_path):
            os.makedirs(self.psf_path)
        if not os.path.isdir(self.edisp_path):
            os.makedirs(self.edisp_path)
        if not os.path.isdir(self.bgd_path):
            os.makedirs(self.bgd_path)

        # Return
        return

    def open(self, version, split=False, clobber=True):
        """
        Open existing or create new calibration. The actual version will put
        all calibrations in the same file, although each part of the response
        function will have its own logical name. We can thus easily modify
        the script to put each calibration information in a separate file.

        Parameters:
         version - Filename version
        Keywords:
         split   - Split IRF over several files?
         clobber - Overwrite existing files? (currently unused)
        """
        # Set calibration file names
        if split:
            self.ea_file    = "ea_"+version+".fits"
            self.psf_file   = "psf_"+version+".fits"
            self.edisp_file = "edisp_"+version+".fits"
            self.bgd_file   = "bgd_"+version+".fits"
        else:
            self.ea_file    = "irf_"+version+".fits"
            self.psf_file   = "irf_"+version+".fits"
            self.edisp_file = "irf_"+version+".fits"
            self.bgd_file   = "irf_"+version+".fits"

        # Open calibration database index
        self.cif = GFits(self.base_path+"/caldb.indx", True)

        # If file has no CIF extension then create it now
        # NOTE(review): gammalib raises on a missing extension; the
        # exception type is not visible here, hence the broad except.
        try:
            self.hdu_cif = self.cif.table("CIF")
        except:
            self.create_cif()
            self.hdu_cif = self.cif.table("CIF")

        # Set filenames
        ea_filename    = self.ea_path+"/"+self.ea_file
        psf_filename   = self.psf_path+"/"+self.psf_file
        edisp_filename = self.edisp_path+"/"+self.edisp_file
        bgd_filename   = self.bgd_path+"/"+self.bgd_file

        # Open files. In the non-split case all components share one
        # IRF file object.
        if split:
            self.ea    = GFits(ea_filename, True)
            self.psf   = GFits(psf_filename, True)
            self.edisp = GFits(edisp_filename, True)
            self.bgd   = GFits(bgd_filename, True)
        else:
            self.irf   = GFits(ea_filename, True)
            self.ea    = self.irf
            self.psf   = self.irf
            self.edisp = self.irf
            self.bgd   = self.irf

        # Open HDUs
        self.open_ea()
        self.open_psf()
        self.open_edisp()
        self.open_bgd()

        # Return
        return

    def close(self):
        """
        Close calibration FITS files.

        Parameters:
         None
        Keywords:
         None
        """
        # Add information to CIF. We do this now as before we did not
        # necessarily have all the information at hand (in particular
        # about the boundaries)
        self.add_cif_info()

        # Close CIF
        if self.cif is not None:
            self.cif.save(True)
            self.cif.close()
            self.cif = None

        # Invalidate the CIF table pointer so that a second close()
        # (e.g. from the destructor after an explicit close) does not
        # append duplicate rows to a stale table.
        self.hdu_cif = None

        # If IRF file exists then close it now. All file pointers will
        # be set to None
        if self.irf is not None:
            self.irf.save(True)
            self.irf.close()
            self.irf   = None
            self.ea    = None
            self.psf   = None
            self.edisp = None
            self.bgd   = None

        # ... otherwise we have split files, so we have to close them
        # all
        else:

            # Close effective area file
            if self.ea is not None:
                self.ea.save(True)
                self.ea.close()
                self.ea = None

            # Close point spread function file
            if self.psf is not None:
                self.psf.save(True)
                self.psf.close()
                self.psf = None

            # Close energy dispersion file
            if self.edisp is not None:
                self.edisp.save(True)
                self.edisp.close()
                self.edisp = None

            # Close background file
            if self.bgd is not None:
                self.bgd.save(True)
                self.bgd.close()
                self.bgd = None

        # Return
        return

    def create_cif(self):
        """
        Create Calibration Index File (CIF) extension in CIF FITS file.

        Parameters:
         None
        Keywords:
         None
        """
        # Create binary table
        table = GFitsBinTable()

        # Attach columns. Reference: CAL/GEN/92-008
        table.append_column(GFitsTableStringCol("TELESCOP", 0, 10))
        table.append_column(GFitsTableStringCol("INSTRUME", 0, 10))
        table.append_column(GFitsTableStringCol("DETNAM", 0, 20))
        table.append_column(GFitsTableStringCol("FILTER", 0, 10))
        table.append_column(GFitsTableStringCol("CAL_DEV", 0, 20))
        table.append_column(GFitsTableStringCol("CAL_DIR", 0, 70))
        table.append_column(GFitsTableStringCol("CAL_FILE", 0, 40))
        table.append_column(GFitsTableStringCol("CAL_CLAS", 0, 3))
        table.append_column(GFitsTableStringCol("CAL_DTYP", 0, 4))
        table.append_column(GFitsTableStringCol("CAL_CNAM", 0, 20))
        table.append_column(GFitsTableStringCol("CAL_CBD", 0, 70, 9))
        table.append_column(GFitsTableShortCol("CAL_XNO", 0))
        table.append_column(GFitsTableStringCol("CAL_VSD", 0, 10))
        table.append_column(GFitsTableStringCol("CAL_VST", 0, 8))
        table.append_column(GFitsTableDoubleCol("REF_TIME", 0))
        table.append_column(GFitsTableShortCol("CAL_QUAL", 0))
        table.append_column(GFitsTableStringCol("CAL_DATE", 0, 8))
        table.append_column(GFitsTableStringCol("CAL_DESC", 0, 70))

        # Set keywords. Reference: CAL/GEN/92-008
        table.extname("CIF")
        table.card("CIFVERSN", "1992a", "Version of CIF format")

        # Attach table to FITS file. Note that at this moment the
        # FITS table gets connected to the FITS file. Yet since
        # nothing was yet written to the FITS file, we cannot read
        # anything from it.
        self.cif.append(table)

        # Return
        return

    def add_cif_info(self):
        """
        Add information to CIF extension.

        One row is appended per response component (effective area,
        point spread function, energy dispersion and background).

        Parameters:
         None
        Keywords:
         None
        """
        # Do nothing if no CIF table is attached (calibration never
        # opened, or already closed)
        if self.hdu_cif is None:
            return

        # Collect per-component CIF information as tuples of
        # (directory, file, name, description, boundaries)
        components = [
            (self.ea_dir,    self.ea_file,    self.ea_name,
             self.ea_desc,    self.ea_bounds),
            (self.psf_dir,   self.psf_file,   self.psf_name,
             self.psf_desc,   self.psf_bounds),
            (self.edisp_dir, self.edisp_file, self.edisp_name,
             self.edisp_desc, self.edisp_bounds),
            (self.bgd_dir,   self.bgd_file,   self.bgd_name,
             self.bgd_desc,   self.bgd_bounds)]

        # Append one row per component. The original code appended only
        # 3 rows for 4 components and wrote both the energy dispersion
        # and the background at the last row, so the background entry
        # overwrote the energy dispersion entry.
        num = len(components)
        self.hdu_cif.append_rows(num)

        # Fill rows
        for i, (cal_dir, cal_file, cal_name, cal_desc, cal_bounds) in \
                enumerate(components):

            # Set row number of this component
            row = i + self.hdu_cif.nrows() - num

            # Set generic information
            self.hdu_cif["TELESCOP"][row] = self.cal_tel
            self.hdu_cif["INSTRUME"][row] = self.cal_inst
            self.hdu_cif["DETNAM"][row]   = self.cal_det
            self.hdu_cif["FILTER"][row]   = self.cal_flt
            self.hdu_cif["CAL_DEV"][row]  = "ONLINE"
            self.hdu_cif["CAL_CLAS"][row] = self.cal_class
            self.hdu_cif["CAL_DTYP"][row] = self.cal_type
            self.hdu_cif["CAL_VSD"][row]  = self.val_date
            self.hdu_cif["CAL_VST"][row]  = self.val_time
            self.hdu_cif["REF_TIME"][row] = self.ref_time
            self.hdu_cif["CAL_QUAL"][row] = self.cal_qual
            self.hdu_cif["CAL_DATE"][row] = self.cal_date

            # Set component specific information
            self.hdu_cif["CAL_DIR"][row]  = cal_dir
            self.hdu_cif["CAL_FILE"][row] = cal_file
            self.hdu_cif["CAL_CNAM"][row] = cal_name
            self.hdu_cif["CAL_DESC"][row] = cal_desc
            self.hdu_cif["CAL_XNO"][row]  = 1

            # Set up to 9 boundary strings, padding with "NONE"
            for n in range(9):
                if n >= len(cal_bounds):
                    self.hdu_cif["CAL_CBD"][row, n] = "NONE"
                else:
                    self.hdu_cif["CAL_CBD"][row, n] = cal_bounds[n]

        # Return
        return

    def open_ea(self):
        """
        Open effective area extension. If no extension was found
        then create one. We do not yet set any data as we don't
        know the dimensions of the table yet.

        Parameters:
         None
        Keywords:
         None
        """
        # Get extension. If it does not exist then create it now.
        try:
            self.hdu_ea = self.ea.table("EFFECTIVE AREA")
        except:
            # Create binary table
            table = GFitsBinTable()

            # Set extension name
            table.extname("EFFECTIVE AREA")

            # Set OGIP keywords
            self.set_ogip_keywords(table, self.ea_doc, \
                                   ["RESPONSE", self.ea_name])

            # Append table
            self.ea.append(table)

            # Get extension
            self.hdu_ea = self.ea.table("EFFECTIVE AREA")

        # Return
        return

    def open_psf(self):
        """
        Open point spread function extension. If no extension was
        found then create one. We do not yet set any data as we don't
        know the dimensions of the table yet.

        Parameters:
         None
        Keywords:
         None
        """
        # Get extension. If it does not exist then create it now.
        try:
            self.hdu_psf = self.psf.table("POINT SPREAD FUNCTION")
        except:
            # Create binary table
            table = GFitsBinTable()

            # Set extension name
            table.extname("POINT SPREAD FUNCTION")

            # Set OGIP keywords
            self.set_ogip_keywords(table, self.psf_doc, \
                                   ["RESPONSE", self.psf_name])

            # Append table
            self.psf.append(table)

            # Get extension
            self.hdu_psf = self.psf.table("POINT SPREAD FUNCTION")

        # Return
        return

    def open_edisp(self):
        """
        Open energy dispersion extension. If no extension was found
        then create one. We do not yet set any data as we don't
        know the dimensions of the table yet.

        Parameters:
         None
        Keywords:
         None
        """
        # Get extension. If it does not exist then create it now.
        try:
            self.hdu_edisp = self.edisp.table("ENERGY DISPERSION")
        except:
            # Create binary table
            table = GFitsBinTable()

            # Set extension name
            table.extname("ENERGY DISPERSION")

            # Set OGIP keywords
            self.set_ogip_keywords(table, self.edisp_doc, \
                                   ["RESPONSE", self.edisp_name])

            # Append table
            self.edisp.append(table)

            # Get extension
            self.hdu_edisp = self.edisp.table("ENERGY DISPERSION")

        # Return
        return

    def open_bgd(self):
        """
        Open background extension. If no extension was found
        then create one. We do not yet set any data as we don't
        know the dimensions of the table yet.

        Parameters:
         None
        Keywords:
         None
        """
        # Get extension. If it does not exist then create it now.
        try:
            self.hdu_bgd = self.bgd.table("BACKGROUND")
        except:
            # Create binary table
            table = GFitsBinTable()

            # Set extension name
            table.extname("BACKGROUND")

            # Set OGIP keywords
            self.set_ogip_keywords(table, self.bgd_doc, \
                                   ["RESPONSE", self.bgd_name])

            # Append table
            self.bgd.append(table)

            # Get extension
            self.hdu_bgd = self.bgd.table("BACKGROUND")

        # Return
        return

    def set_ogip_keywords(self, hdu, hdudoc, hduclas):
        """
        Set standard OGIP keywords for extension.

        Parameters:
         hdu     - Header Data Unit.
         hdudoc  - Documentation reference string
         hduclas - Array of HDUCLAS fields
        Keywords:
         None
        """
        # Set keywords
        hdu.card("ORIGIN", "IRAP", "Name of organization making this file")
        hdu.card("DATE", self.utc, "File creation date (YYYY-MM-DDThh:mm:ss UTC)")
        hdu.card("TELESCOP", self.cal_tel, "Name of telescope")
        hdu.card("INSTRUME", self.cal_inst, "Name of instrument")
        hdu.card("DETNAM", self.cal_det, "Name of detector")
        hdu.card("HDUCLASS", "OGIP", "")
        hdu.card("HDUDOC", hdudoc, "")
        for i, item in enumerate(hduclas):
            key = "HDUCLAS%d" % (i+1)
            hdu.card(key, item, "")
        hdu.card("HDUVERS", "1.0.0", "")

        # Return
        return

    def make_2D(self, array, hdu, name, unit, scale=1.0):
        """
        Make 2D matrix as function of energy and offset angle from a
        ROOT 2D histogram. If the HDU has already the energy and
        offset angle columns, this method will simply add another data
        column.
        If name==None, the method will not append any data column.

        The histogram X axis is assumed to hold log10(energy/TeV)
        and the Y axis the offset angle in degrees.

        Parameters:
         array - ROOT 2D histogram.
         hdu   - FITS HDU.
         name  - Data column name.
         unit  - Data unit.
        Keywords:
         scale - Scaling factor for histogram values.
        """
        # Extract energy and offset angle vectors
        energies = array.GetXaxis()
        offsets  = array.GetYaxis()
        neng     = energies.GetNbins()
        noffset  = offsets.GetNbins()

        # Attach columns to HDU. We do this only if they do not yet
        # exist. This allows adding columns to the same HDU in successive
        # calls of this method
        #
        # ENERG_LO
        if not hdu.hascolumn("ENERG_LO"):
            hdu.append_column(GFitsTableFloatCol("ENERG_LO", 1, neng))
            hdu["ENERG_LO"].unit("TeV")
            for ieng in range(neng):
                # Axis is log10(energy), convert back to TeV
                e_lo = pow(10.0, energies.GetBinLowEdge(ieng+1))
                hdu["ENERG_LO"][0,ieng] = e_lo
        #
        # ENERG_HI
        if not hdu.hascolumn("ENERG_HI"):
            hdu.append_column(GFitsTableFloatCol("ENERG_HI", 1, neng))
            hdu["ENERG_HI"].unit("TeV")
            for ieng in range(neng):
                e_hi = pow(10.0, energies.GetBinUpEdge(ieng+1))
                hdu["ENERG_HI"][0,ieng] = e_hi
        #
        # THETA_LO
        if not hdu.hascolumn("THETA_LO"):
            hdu.append_column(GFitsTableFloatCol("THETA_LO", 1, noffset))
            hdu["THETA_LO"].unit("deg")
            for ioff in range(noffset):
                o_lo = offsets.GetBinLowEdge(ioff+1)
                hdu["THETA_LO"][0,ioff] = o_lo
        #
        # THETA_HI
        if not hdu.hascolumn("THETA_HI"):
            hdu.append_column(GFitsTableFloatCol("THETA_HI", 1, noffset))
            hdu["THETA_HI"].unit("deg")
            for ioff in range(noffset):
                o_hi = offsets.GetBinUpEdge(ioff+1)
                hdu["THETA_HI"][0,ioff] = o_hi
        #
        # "NAME" (optional data column, stored as a flattened 2D array
        # with energy as the fastest varying index)
        if name is not None and not hdu.hascolumn(name):
            hdu.append_column(GFitsTableFloatCol(name, 1, neng*noffset))
            hdu[name].unit(unit)
            hdu[name].dim([neng, noffset])
            for ioff in range(noffset):
                for ieng in range(neng):
                    index = ieng + ioff * neng
                    value = array.GetBinContent(ieng+1,ioff+1)
                    hdu[name][0,index] = value * scale

        # Collect boundary information
        bd_eng = "ENERG(%.4f-%.2f)TeV" % \
                 (pow(10.0, energies.GetBinLowEdge(1)), \
                  pow(10.0, energies.GetBinUpEdge(neng)))
        bd_off = "THETA(%.2f-%.2f)deg" % \
                 (offsets.GetBinLowEdge(1), \
                  offsets.GetBinUpEdge(noffset))
        bd_phi = "PHI(0-360)deg"
        bounds = [bd_eng, bd_off, bd_phi]

        # Return boundary information
        return bounds

    def root2caldb(self, filename):
        """
        Translate ROOT to CALDB information.

        Parameters:
         filename - ROOT 2D performance filename.
        """
        # Open performance file
        file = TFile(filename)

        # Create effective area
        self.root2ea(file)

        # Create point spread function
        self.root2psf(file)

        # Create energy dispersion
        self.root2edisp(file)

        # Create background
        self.root2bgd(file)

        # Return
        return

    def root2ea(self, file):
        """
        Translate ROOT to CALDB effective area extension. The following ROOT
        histograms are used:

        EffectiveArea80_offaxis -> EFFAREA      (scaling: 1/0.8)
        EffectiveArea80_offaxis -> EFFAREA_RECO (scaling: 1/0.8)

        Parameters:
         file - ROOT file.
        Keywords:
         None
        """
        # Continue only if effective area HDU has been opened
        if self.hdu_ea is not None:

            # Allocate ROOT 2D array
            array = TH2F()

            # EFFAREA (2D effective 80% area, scaled to full containment)
            file.GetObject("EffectiveArea80_offaxis", array)
            bounds = self.make_2D(array, self.hdu_ea, "EFFAREA", "m2", scale=1.0/0.8)
            for b in bounds:
                self.ea_bounds.append(b)
            self.set_cif_keywords(self.hdu_ea, self.ea_name, \
                                  self.ea_bounds, self.ea_desc)

            # EFFAREA_RECO (2D effective 80% area)
            file.GetObject("EffectiveArea80_offaxis", array)
            self.make_2D(array, self.hdu_ea, "EFFAREA_RECO", "m2", scale=1.0/0.8)

        # Return
        return

    def root2psf(self, file):
        """
        Translate ROOT to CALDB point spread function extension. The following
        ROOT histograms are used:

        1.0            -> SCALE
        AngRes_offaxis -> SIGMA_1 (scaling: 1/0.8)
        0.0            -> AMPL_2
        0.0            -> SIGMA_2
        0.0            -> AMPL_3
        0.0            -> SIGMA_3

        Parameters:
         file - ROOT file.
        Keywords:
         None
        """
        # Continue only if point spread function HDU has been opened
        if self.hdu_psf is not None:

            # Allocate ROOT 2D array
            r68 = TH2F()

            # Read histogram and build constant companion histograms
            # (unit: all bins 1.0; zero: all bins 0.0) used to fill the
            # SCALE and unused Gaussian component columns
            file.GetObject("AngRes_offaxis", r68)
            unit    = r68.Clone()
            zero    = r68.Clone()
            neng    = r68.GetXaxis().GetNbins()
            noffset = r68.GetYaxis().GetNbins()
            for ioff in range(noffset):
                for ieng in range(neng):
                    unit.SetBinContent(ieng+1,ioff+1,1.0)
                    zero.SetBinContent(ieng+1,ioff+1,0.0)

            # Set boundaries (name=None: only axis columns are created)
            bounds = self.make_2D(unit, self.hdu_psf, None, "deg")
            for b in bounds:
                self.psf_bounds.append(b)
            self.set_cif_keywords(self.hdu_psf, self.psf_name, \
                                  self.psf_bounds, self.psf_desc)

            # SCALE
            self.make_2D(unit, self.hdu_psf, "SCALE", "")

            # SIGMA_1
            self.make_2D(r68, self.hdu_psf, "SIGMA_1", "deg")

            # AMPL_2
            self.make_2D(zero, self.hdu_psf, "AMPL_2", "")

            # SIGMA_2
            self.make_2D(zero, self.hdu_psf, "SIGMA_2", "deg")

            # AMPL_3
            self.make_2D(zero, self.hdu_psf, "AMPL_3", "")

            # SIGMA_3
            self.make_2D(zero, self.hdu_psf, "SIGMA_3", "deg")

        # Return
        return

    def root2edisp(self, file):
        """
        Translate ROOT to CALDB energy dispersion extension. The following ROOT
        histograms are used:

        ERes -> ERES68

        Parameters:
         file - ROOT file.
        Keywords:
         None
        """
        # Continue only if energy dispersion HDU has been opened
        if self.hdu_edisp is not None:

            # Allocate ROOT 2D array
            array = TH2F()

            # ERES68
            file.GetObject("ERes", array)
            bounds = self.make_2D(array, self.hdu_edisp, "ERES68", "")
            for b in bounds:
                self.edisp_bounds.append(b)
            self.set_cif_keywords(self.hdu_edisp, self.edisp_name, \
                                  self.edisp_bounds, self.edisp_desc)

        # Return
        return

    def root2bgd(self, file):
        """
        Translate ROOT to CALDB background extension. The following ROOT
        histograms are used:

        TBD -> TBD

        NOTE(review): the background columns are filled from the "ERes"
        histogram as a placeholder until real background histograms are
        available.

        Parameters:
         file - ROOT file.
        Keywords:
         None
        """
        # Continue only if background HDU has been opened
        if self.hdu_bgd is not None:

            # Allocate ROOT 2D array
            array = TH2F()

            # BGD (placeholder, see note above)
            file.GetObject("ERes", array)
            bounds = self.make_2D(array, self.hdu_bgd, "BGD", "")
            for b in bounds:
                self.bgd_bounds.append(b)
            self.set_cif_keywords(self.hdu_bgd, self.bgd_name, \
                                  self.bgd_bounds, self.bgd_desc)

            # BGD_RECO (placeholder, see note above)
            file.GetObject("ERes", array)
            self.make_2D(array, self.hdu_bgd, "BGD_RECO", "")

        # Return
        return

    def set_cif_keywords(self, hdu, name, bounds,desc):
        """
        Set standard CIF keywords for extension.

        Parameters:
         hdu    - Header Data Unit.
         name   - Calibration name.
         bounds - List of up to 9 calibration boundary strings.
         desc   - Calibration description.
        """
        # Set standard CIF keywords
        hdu.card("CSYSNAME", "XMA_POL", "")
        hdu.card("CCLS0001", self.cal_class, "Calibration class")
        hdu.card("CDTP0001", self.cal_type, "Calibration type")
        hdu.card("CCNM0001", name, "Calibration name")

        # Set boundary keywords, padding the 9 slots with "NONE"
        for n in range(9):
            keyname = "CBD%d0001" % (n+1)
            if n >= len(bounds):
                value = "NONE"
            else:
                value = bounds[n]
            hdu.card(keyname, value, "Calibration boundary")

        # Set validity keywords
        hdu.card("CVSD0001", self.val_date, "Calibration validity start date (UTC)")
        hdu.card("CVST0001", self.val_time, "Calibration validity start time (UTC)")
        hdu.card("CDEC0001", desc, "Calibration description")

        # Return
        return
#==========================#
# Main routine entry point #
#==========================#
if __name__ == '__main__':

    # This script translates a CTA ROOT 2D performance file into a
    # CALDB compliant CTA response.

    # Build the name of the ROOT performance file
    root_dir  = "/Users/jurgen/Documents/Travail/Projects/CTA/WP-MC/root/IFAEOffaxisPerformanceBEI_Nov2011"
    root_file = "SubarrayE_IFAE_50hours_20111121_offaxis.root"
    filename  = "/".join([root_dir, root_file])

    # Observation identifiers for which CALDB entries are generated
    obsids = ["000001", "000002", "000003"]

    # Generate one CALDB entry per observation identifier
    for obsid in obsids:

        # Allocate CALDB entry for instrument "E"
        db = caldb("E", obsid)

        # Open calibration for this entry
        db.open("test")

        # Translate ROOT to CALDB information
        db.root2caldb(filename)