cta_root2caldb.py
1 |
#! /usr/bin/env python
|
---|---|
2 |
# ==========================================================================
|
3 |
# This script generates IRFs in the CALDB format of HEASARC using ROOT 2D
|
4 |
# performance files.
|
5 |
# -------------------------------------------------------------------------
|
6 |
# Copyright (C) 2011-2012 Juergen Knoedlseder
|
7 |
# -------------------------------------------------------------------------
|
8 |
#
|
9 |
# This program is free software: you can redistribute it and/or modify
|
10 |
# it under the terms of the GNU General Public License as published by
|
11 |
# the Free Software Foundation, either version 3 of the License, or
|
12 |
# (at your option) any later version.
|
13 |
#
|
14 |
# This program is distributed in the hope that it will be useful,
|
15 |
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
16 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
17 |
# GNU General Public License for more details.
|
18 |
#
|
19 |
# You should have received a copy of the GNU General Public License
|
20 |
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
21 |
#
|
22 |
# -------------------------------------------------------------------------
|
23 |
# Requirements:
|
24 |
# - gammalib
|
25 |
# - pyroot
|
26 |
# ==========================================================================
|
27 |
from ROOT import TFile, TH1F, TH2F |
28 |
from gammalib import * |
29 |
from datetime import datetime |
30 |
import math |
31 |
import sys |
32 |
import glob |
33 |
import os |
34 |
|
35 |
|
36 |
# =========== #
|
37 |
# CalDB class #
|
38 |
# =========== #
|
39 |
class caldb():
    """
    Utility class that creates and populates one CALDB (HEASARC calibration
    database) entry for a single CTA observation. It builds the directory
    structure, maintains the Calibration Index File (CIF) and writes the
    IRF components (effective area, PSF, energy dispersion, background)
    translated from ROOT performance files.
    """
    def __init__(self, inst, obsid, path=""):
        """
        CALDB constructor. The constructor will create one CALDB entry for
        an observation.

        Parameters:
         inst  - Instrument string.
         obsid - Observation ID string.
        Keywords:
         path  - CALDB Root path.
        """
        # Store root path
        self.path = path

        # Initialise some members. FITS file handles stay None until open()
        self.cif     = None   # Calibration index FITS file
        self.irf     = None   # Combined IRF FITS file (non-split mode)
        self.ea      = None   # Effective area FITS file
        self.psf     = None   # Point spread function FITS file
        self.edisp   = None   # Energy dispersion FITS file
        self.bgd     = None   # Background FITS file
        self.hdu_cif = None   # CIF table HDU

        # Store UTC date of job execution (seconds precision, no fraction)
        self.utc = datetime.utcnow().isoformat()[:19]

        # Set CALDB information
        self.cal_tel      = "CTA"
        self.cal_inst     = inst.upper()
        self.cal_obsid    = obsid
        self.cal_det      = "NONE"
        self.cal_flt      = "NONE"
        self.cal_class    = "BCF"
        self.cal_type     = "DATA"
        self.cal_qual     = 0            # 0=good, 1=bad, 2=dubious, ...
        self.cal_date     = "11/01/01"
        self.val_date     = "2011-01-01"
        self.val_time     = "00:00:00"
        self.ref_time     = 51544.0
        self.cal_version  = "VERSION(DC1)"
        self.cal_cut      = "CLASS(BEST)"
        self.cal_analysis = "ANALYSIS(IFAE)"

        # Effective area metadata (name, reference document, CIF boundaries,
        # description); boundaries get extended later by root2ea()
        self.ea_name      = "EFF_AREA"
        self.ea_doc       = "CAL/GEN/92-019"
        self.ea_bounds    = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.ea_desc      = "CTA effective area"

        # Point spread function metadata
        self.psf_name     = "RPSF"
        self.psf_doc      = "CAL/GEN/92-020"
        self.psf_bounds   = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.psf_desc     = "CTA point spread function"

        # Energy dispersion metadata (no reference document known yet)
        self.edisp_name   = "EDISP"
        self.edisp_doc    = "???"
        self.edisp_bounds = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.edisp_desc   = "CTA energy dispersion"

        # Background metadata (no reference document known yet)
        self.bgd_name     = "BGD"
        self.bgd_doc      = "???"
        self.bgd_bounds   = [self.cal_version, self.cal_cut, self.cal_analysis]
        self.bgd_desc     = "CTA background"

        # Create directory structure
        self.make_dirs()

        # Return
        return
|
110 |
|
111 |
def __del__(self): |
112 |
"""
|
113 |
Destructor. Close all FITS files.
|
114 |
"""
|
115 |
# Close calibration
|
116 |
self.close()
|
117 |
|
118 |
# Return
|
119 |
return
|
120 |
|
121 |
def make_dirs(self): |
122 |
"""
|
123 |
Generate CALDB directory structure for one observation identifier.
|
124 |
The structure is given by
|
125 |
|
126 |
data/<tel>/<inst>/bcf/<obsid>
|
127 |
|
128 |
where <tel> is "cta" and <inst> is the instrument specified in the
|
129 |
CALDB constructor (the instrument may be used for different array
|
130 |
configurations).
|
131 |
|
132 |
Parameters:
|
133 |
None
|
134 |
Keywords:
|
135 |
None
|
136 |
"""
|
137 |
# Set base path
|
138 |
self.base_dir = "data" |
139 |
self.base_dir += "/"+self.cal_tel.lower() |
140 |
self.base_dir += "/"+self.cal_inst.lower() |
141 |
|
142 |
# Set directory names
|
143 |
self.bcf_dir = self.base_dir+"/bcf" |
144 |
self.obs_dir = self.bcf_dir+"/"+self.cal_obsid |
145 |
self.ea_dir = self.obs_dir |
146 |
self.psf_dir = self.obs_dir |
147 |
self.edisp_dir = self.obs_dir |
148 |
self.bgd_dir = self.obs_dir |
149 |
|
150 |
# Optionally prefix path
|
151 |
if len(self.path) > 0: |
152 |
self.base_path = self.path+"/"+self.base_dir |
153 |
self.bcf_path = self.path+"/"+self.bcf_dir |
154 |
self.obs_path = self.path+"/"+self.obs_dir |
155 |
self.ea_path = self.path+"/"+self.ea_dir |
156 |
self.psf_path = self.path+"/"+self.psf_dir |
157 |
self.edisp_path = self.path+"/"+self.edisp_dir |
158 |
self.bgd_path = self.path+"/"+self.bgd_dir |
159 |
else:
|
160 |
self.base_path = self.base_dir |
161 |
self.bcf_path = self.bcf_dir |
162 |
self.obs_path = self.obs_dir |
163 |
self.ea_path = self.ea_dir |
164 |
self.psf_path = self.psf_dir |
165 |
self.edisp_path = self.edisp_dir |
166 |
self.bgd_path = self.bgd_dir |
167 |
|
168 |
# Create directory structure
|
169 |
if not os.path.isdir(self.ea_path): |
170 |
os.makedirs(self.ea_path)
|
171 |
if not os.path.isdir(self.psf_path): |
172 |
os.makedirs(self.psf_path)
|
173 |
if not os.path.isdir(self.edisp_path): |
174 |
os.makedirs(self.edisp_path)
|
175 |
if not os.path.isdir(self.bgd_path): |
176 |
os.makedirs(self.bgd_path)
|
177 |
|
178 |
# Return
|
179 |
return
|
180 |
|
181 |
def open(self, version, split=False, clobber=True): |
182 |
"""
|
183 |
Open existing or create new calibration. The actual version will put
|
184 |
all calibrations in the same file, although each part of the response
|
185 |
function will have its own logical name. We can thus easily modify
|
186 |
the script to put each calibration information in a separate file.
|
187 |
|
188 |
Parameters:
|
189 |
version - Filename version
|
190 |
Keywords:
|
191 |
split - Split IRF over several files?
|
192 |
clobber - Overwrite existing files?
|
193 |
"""
|
194 |
# Set calibrate file names
|
195 |
if split:
|
196 |
self.ea_file = "ea_"+version+".fits" |
197 |
self.psf_file = "psf_"+version+".fits" |
198 |
self.edisp_file = "edisp_"+version+".fits" |
199 |
self.bgd_file = "bgd_"+version+".fits" |
200 |
else:
|
201 |
self.ea_file = "irf_"+version+".fits" |
202 |
self.psf_file = "irf_"+version+".fits" |
203 |
self.edisp_file = "irf_"+version+".fits" |
204 |
self.bgd_file = "irf_"+version+".fits" |
205 |
|
206 |
# Open calibration database index
|
207 |
self.cif = GFits(self.base_path+"/caldb.indx", True) |
208 |
|
209 |
# If file has no CIF extension than create it now
|
210 |
try:
|
211 |
self.hdu_cif = self.cif.table("CIF") |
212 |
except:
|
213 |
self.create_cif()
|
214 |
self.hdu_cif = self.cif.table("CIF") |
215 |
|
216 |
# Set filenames
|
217 |
ea_filename = self.ea_path+"/"+self.ea_file |
218 |
psf_filename = self.psf_path+"/"+self.psf_file |
219 |
edisp_filename = self.edisp_path+"/"+self.edisp_file |
220 |
bgd_filename = self.bgd_path+"/"+self.bgd_file |
221 |
|
222 |
# Open files
|
223 |
if split:
|
224 |
self.ea = GFits(ea_filename, True) |
225 |
self.psf = GFits(psf_filename, True) |
226 |
self.edisp = GFits(edisp_filename, True) |
227 |
self.bgd = GFits(bgd_filename, True) |
228 |
else:
|
229 |
self.irf = GFits(ea_filename, True) |
230 |
self.ea = self.irf |
231 |
self.psf = self.irf |
232 |
self.edisp = self.irf |
233 |
self.bgd = self.irf |
234 |
|
235 |
# Open HDUs
|
236 |
self.open_ea()
|
237 |
self.open_psf()
|
238 |
self.open_edisp()
|
239 |
self.open_bgd()
|
240 |
|
241 |
# Return
|
242 |
return
|
243 |
|
244 |
def close(self): |
245 |
"""
|
246 |
Close calibration FITS files.
|
247 |
|
248 |
Parameters:
|
249 |
None
|
250 |
Keywords:
|
251 |
None
|
252 |
"""
|
253 |
# Add information to CIF. We do this now as before we did not
|
254 |
# necessarily have all the information at hand (in particular
|
255 |
# about the boundaries)
|
256 |
self.add_cif_info()
|
257 |
|
258 |
# Close CIF
|
259 |
if self.cif != None: |
260 |
self.cif.save(True) |
261 |
self.cif.close()
|
262 |
self.cif = None |
263 |
|
264 |
# If IRF file exists then close it now. All file pointers will
|
265 |
# be set to None
|
266 |
if self.irf != None: |
267 |
self.irf.save(True) |
268 |
self.irf.close()
|
269 |
self.irf = None |
270 |
self.ea = None |
271 |
self.psf = None |
272 |
self.edisp = None |
273 |
self.bgd = None |
274 |
|
275 |
# ... otherwise we have split files, so we have to close them
|
276 |
# all
|
277 |
else:
|
278 |
|
279 |
# Close effective area file
|
280 |
if self.ea != None: |
281 |
self.ea.save(True) |
282 |
self.ea.close()
|
283 |
self.ea = None |
284 |
|
285 |
# Close point spread function file
|
286 |
if self.psf != None: |
287 |
self.psf.save(True) |
288 |
self.psf.close()
|
289 |
self.psf = None |
290 |
|
291 |
# Close energy dispersion file
|
292 |
if self.edisp != None: |
293 |
self.edisp.save(True) |
294 |
self.edisp.close()
|
295 |
self.edisp = None |
296 |
|
297 |
# Close background file
|
298 |
if self.bgd != None: |
299 |
self.bgd.save(True) |
300 |
self.bgd.close()
|
301 |
self.bgd = None |
302 |
|
303 |
# Return
|
304 |
return
|
305 |
|
306 |
def create_cif(self): |
307 |
"""
|
308 |
Create Calibration Index File (CIF) extension in CIF FITS file.
|
309 |
|
310 |
Parameters:
|
311 |
None
|
312 |
Keywords:
|
313 |
None
|
314 |
"""
|
315 |
# Create binary table
|
316 |
table = GFitsBinTable() |
317 |
|
318 |
# Set boundary
|
319 |
|
320 |
# Attach columns. Reference: CAL/GEN/92-008
|
321 |
table.append_column(GFitsTableStringCol("TELESCOP", 0, 10)) |
322 |
table.append_column(GFitsTableStringCol("INSTRUME", 0, 10)) |
323 |
table.append_column(GFitsTableStringCol("DETNAM", 0, 20)) |
324 |
table.append_column(GFitsTableStringCol("FILTER", 0, 10)) |
325 |
table.append_column(GFitsTableStringCol("CAL_DEV", 0, 20)) |
326 |
table.append_column(GFitsTableStringCol("CAL_DIR", 0, 70)) |
327 |
table.append_column(GFitsTableStringCol("CAL_FILE", 0, 40)) |
328 |
table.append_column(GFitsTableStringCol("CAL_CLAS", 0, 3)) |
329 |
table.append_column(GFitsTableStringCol("CAL_DTYP", 0, 4)) |
330 |
table.append_column(GFitsTableStringCol("CAL_CNAM", 0, 20)) |
331 |
table.append_column(GFitsTableStringCol("CAL_CBD", 0, 70, 9)) |
332 |
table.append_column(GFitsTableShortCol("CAL_XNO", 0)) |
333 |
table.append_column(GFitsTableStringCol("CAL_VSD", 0, 10)) |
334 |
table.append_column(GFitsTableStringCol("CAL_VST", 0, 8)) |
335 |
table.append_column(GFitsTableDoubleCol("REF_TIME", 0)) |
336 |
table.append_column(GFitsTableShortCol("CAL_QUAL", 0)) |
337 |
table.append_column(GFitsTableStringCol("CAL_DATE", 0, 8)) |
338 |
table.append_column(GFitsTableStringCol("CAL_DESC", 0, 70)) |
339 |
|
340 |
# Set keywords. Reference: CAL/GEN/92-008
|
341 |
table.extname("CIF")
|
342 |
table.card("CIFVERSN", "1992a", "Version of CIF format") |
343 |
|
344 |
# Attach table to FITS file. Note that at this moment the
|
345 |
# FITS table gets connected to the FITS file. Yet since
|
346 |
# nothing was yet written to the FITS file, we cannot read
|
347 |
# anything from it.
|
348 |
self.cif.append(table)
|
349 |
|
350 |
# Return
|
351 |
return
|
352 |
|
353 |
def add_cif_info(self): |
354 |
"""
|
355 |
Add information to CIF extension.
|
356 |
|
357 |
Parameters:
|
358 |
None
|
359 |
Keywords:
|
360 |
None
|
361 |
"""
|
362 |
# Append 3 rows to CIF extension
|
363 |
self.hdu_cif.append_rows(3) |
364 |
|
365 |
# Add generic information for these 3 rows
|
366 |
for i in range(3): |
367 |
|
368 |
# Set row number
|
369 |
row = i+self.hdu_cif.nrows()-3 |
370 |
|
371 |
# Set element
|
372 |
self.hdu_cif["TELESCOP"][row] = self.cal_tel |
373 |
self.hdu_cif["INSTRUME"][row] = self.cal_inst |
374 |
self.hdu_cif["DETNAM"][row] = self.cal_det |
375 |
self.hdu_cif["FILTER"][row] = self.cal_flt |
376 |
self.hdu_cif["CAL_DEV"][row] = "ONLINE" |
377 |
self.hdu_cif["CAL_CLAS"][row] = self.cal_class |
378 |
self.hdu_cif["CAL_DTYP"][row] = self.cal_type |
379 |
self.hdu_cif["CAL_VSD"][row] = self.val_date |
380 |
self.hdu_cif["CAL_VST"][row] = self.val_time |
381 |
self.hdu_cif["REF_TIME"][row] = self.ref_time |
382 |
self.hdu_cif["CAL_QUAL"][row] = self.cal_qual |
383 |
self.hdu_cif["CAL_DATE"][row] = self.cal_date |
384 |
|
385 |
# Add effective area information
|
386 |
row = self.hdu_cif.nrows()-3 |
387 |
self.hdu_cif["CAL_DIR"][row] = self.ea_dir |
388 |
self.hdu_cif["CAL_FILE"][row] = self.ea_file |
389 |
self.hdu_cif["CAL_CNAM"][row] = self.ea_name |
390 |
self.hdu_cif["CAL_DESC"][row] = self.ea_desc |
391 |
self.hdu_cif["CAL_XNO"][row] = 1 |
392 |
for n in range(9): |
393 |
if n >= len(self.ea_bounds): |
394 |
self.hdu_cif["CAL_CBD"][row, n] = "NONE" |
395 |
else:
|
396 |
self.hdu_cif["CAL_CBD"][row, n] = self.ea_bounds[n] |
397 |
|
398 |
# Add point spread function information
|
399 |
row = self.hdu_cif.nrows()-2 |
400 |
self.hdu_cif["CAL_DIR"][row] = self.psf_dir |
401 |
self.hdu_cif["CAL_FILE"][row] = self.psf_file |
402 |
self.hdu_cif["CAL_CNAM"][row] = self.psf_name |
403 |
self.hdu_cif["CAL_DESC"][row] = self.psf_desc |
404 |
self.hdu_cif["CAL_XNO"][row] = 1 |
405 |
for n in range(9): |
406 |
if n >= len(self.psf_bounds): |
407 |
self.hdu_cif["CAL_CBD"][row, n] = "NONE" |
408 |
else:
|
409 |
self.hdu_cif["CAL_CBD"][row, n] = self.psf_bounds[n] |
410 |
|
411 |
# Add energy dispersion information
|
412 |
row = self.hdu_cif.nrows()-1 |
413 |
self.hdu_cif["CAL_DIR"][row] = self.edisp_dir |
414 |
self.hdu_cif["CAL_FILE"][row] = self.edisp_file |
415 |
self.hdu_cif["CAL_CNAM"][row] = self.edisp_name |
416 |
self.hdu_cif["CAL_DESC"][row] = self.edisp_desc |
417 |
self.hdu_cif["CAL_XNO"][row] = 1 |
418 |
for n in range(9): |
419 |
if n >= len(self.edisp_bounds): |
420 |
self.hdu_cif["CAL_CBD"][row, n] = "NONE" |
421 |
else:
|
422 |
self.hdu_cif["CAL_CBD"][row, n] = self.edisp_bounds[n] |
423 |
|
424 |
# Add background information
|
425 |
row = self.hdu_cif.nrows()-1 |
426 |
self.hdu_cif["CAL_DIR"][row] = self.bgd_dir |
427 |
self.hdu_cif["CAL_FILE"][row] = self.bgd_file |
428 |
self.hdu_cif["CAL_CNAM"][row] = self.bgd_name |
429 |
self.hdu_cif["CAL_DESC"][row] = self.bgd_desc |
430 |
self.hdu_cif["CAL_XNO"][row] = 1 |
431 |
for n in range(9): |
432 |
if n >= len(self.bgd_bounds): |
433 |
self.hdu_cif["CAL_CBD"][row, n] = "NONE" |
434 |
else:
|
435 |
self.hdu_cif["CAL_CBD"][row, n] = self.bgd_bounds[n] |
436 |
|
437 |
# Return
|
438 |
return
|
439 |
|
440 |
def open_ea(self): |
441 |
"""
|
442 |
Open effective area extension. If no extension was found
|
443 |
then create one. We do not yet set any data as we don't
|
444 |
know the dimensions of the table yet.
|
445 |
|
446 |
Parameters:
|
447 |
None
|
448 |
Keywords:
|
449 |
None
|
450 |
"""
|
451 |
# Get extension. If it does not exist then create it now.
|
452 |
try:
|
453 |
self.hdu_ea = self.ea.table("EFFECTIVE AREA") |
454 |
except:
|
455 |
# Create binary table
|
456 |
table = GFitsBinTable() |
457 |
|
458 |
# Set extension name
|
459 |
table.extname("EFFECTIVE AREA")
|
460 |
|
461 |
# Set OGIP keywords
|
462 |
self.set_ogip_keywords(table, self.ea_doc, \ |
463 |
["RESPONSE", self.ea_name]) |
464 |
|
465 |
# Append table
|
466 |
self.ea.append(table)
|
467 |
|
468 |
# Get extension
|
469 |
self.hdu_ea = self.ea.table("EFFECTIVE AREA") |
470 |
|
471 |
# Return
|
472 |
return
|
473 |
|
474 |
def open_psf(self): |
475 |
"""
|
476 |
Open point spread function extension. If no extension was
|
477 |
found then create one. We do not yet set any data as we don't
|
478 |
know the dimensions of the table yet.
|
479 |
|
480 |
Parameters:
|
481 |
None
|
482 |
Keywords:
|
483 |
None
|
484 |
"""
|
485 |
# Get extension. If it does not exist then create it now.
|
486 |
try:
|
487 |
self.hdu_psf = self.psf.table("POINT SPREAD FUNCTION") |
488 |
except:
|
489 |
# Create binary table
|
490 |
table = GFitsBinTable() |
491 |
|
492 |
# Set extension name
|
493 |
table.extname("POINT SPREAD FUNCTION")
|
494 |
|
495 |
# Set OGIP keywords
|
496 |
self.set_ogip_keywords(table, self.psf_doc, \ |
497 |
["RESPONSE", self.psf_name]) |
498 |
|
499 |
# Append table
|
500 |
self.psf.append(table)
|
501 |
|
502 |
# Get extension
|
503 |
self.hdu_psf = self.psf.table("POINT SPREAD FUNCTION") |
504 |
|
505 |
# Return
|
506 |
return
|
507 |
|
508 |
def open_edisp(self): |
509 |
"""
|
510 |
Open energy dispersion extension. If no extension was found
|
511 |
then create one. We do not yet set any data as we don't
|
512 |
know the dimensions of the table yet.
|
513 |
|
514 |
Parameters:
|
515 |
None
|
516 |
Keywords:
|
517 |
None
|
518 |
"""
|
519 |
# Get extension. If it does not exist then create it now.
|
520 |
try:
|
521 |
self.hdu_edisp = self.edisp.table("ENERGY DISPERSION") |
522 |
except:
|
523 |
# Create binary table
|
524 |
table = GFitsBinTable() |
525 |
|
526 |
# Set extension name
|
527 |
table.extname("ENERGY DISPERSION")
|
528 |
|
529 |
# Set OGIP keywords
|
530 |
self.set_ogip_keywords(table, self.edisp_doc, \ |
531 |
["RESPONSE", self.edisp_name]) |
532 |
|
533 |
# Append table
|
534 |
self.edisp.append(table)
|
535 |
|
536 |
# Get extension
|
537 |
self.hdu_edisp = self.edisp.table("ENERGY DISPERSION") |
538 |
|
539 |
# Return
|
540 |
return
|
541 |
|
542 |
def open_bgd(self): |
543 |
"""
|
544 |
Open background extension. If no extension was found
|
545 |
then create one. We do not yet set any data as we don't
|
546 |
know the dimensions of the table yet.
|
547 |
|
548 |
Parameters:
|
549 |
None
|
550 |
Keywords:
|
551 |
None
|
552 |
"""
|
553 |
# Get extension. If it does not exist then create it now.
|
554 |
try:
|
555 |
self.hdu_bgd = self.bgd.table("BACKGROUND") |
556 |
except:
|
557 |
# Create binary table
|
558 |
table = GFitsBinTable() |
559 |
|
560 |
# Set extension name
|
561 |
table.extname("BACKGROUND")
|
562 |
|
563 |
# Set OGIP keywords
|
564 |
self.set_ogip_keywords(table, self.bgd_doc, \ |
565 |
["RESPONSE", self.bgd_name]) |
566 |
|
567 |
# Append table
|
568 |
self.bgd.append(table)
|
569 |
|
570 |
# Get extension
|
571 |
self.hdu_bgd = self.bgd.table("BACKGROUND") |
572 |
|
573 |
# Return
|
574 |
return
|
575 |
|
576 |
def set_ogip_keywords(self, hdu, hdudoc, hduclas): |
577 |
"""
|
578 |
Set standard OGIP keywords for extension.
|
579 |
|
580 |
Parameters:
|
581 |
hdu - Header Data Unit.
|
582 |
hdudoc - Documentation reference string
|
583 |
hduclas - Array of HDUCLAS fields
|
584 |
Keywords:
|
585 |
None
|
586 |
"""
|
587 |
# Set keywords
|
588 |
hdu.card("ORIGIN", "IRAP", "Name of organization making this file") |
589 |
hdu.card("DATE", self.utc, "File creation date (YYYY-MM-DDThh:mm:ss UTC)") |
590 |
hdu.card("TELESCOP", self.cal_tel, "Name of telescope") |
591 |
hdu.card("INSTRUME", self.cal_inst, "Name of instrument") |
592 |
hdu.card("DETNAM", self.cal_det, "Name of detector") |
593 |
hdu.card("HDUCLASS", "OGIP", "") |
594 |
hdu.card("HDUDOC", hdudoc, "") |
595 |
for i, item in enumerate(hduclas): |
596 |
key = "HDUCLAS%d" % (i+1) |
597 |
hdu.card(key, item, "")
|
598 |
hdu.card("HDUVERS", "1.0.0", "") |
599 |
|
600 |
# Return
|
601 |
return
|
602 |
|
603 |
def make_2D(self, array, hdu, name, unit, scale=1.0): |
604 |
"""
|
605 |
Make 2D matrix as function of energy and offset angle from a
|
606 |
ROOT 2D histogram. If the HDU has already the energy and
|
607 |
offset angle columns, this method will simply add another data
|
608 |
column.
|
609 |
If name==None, the method will not append any data column.
|
610 |
|
611 |
Parameters:
|
612 |
array - ROOT 2D histogram.
|
613 |
hdu - FITS HDU.
|
614 |
name - Data column name.
|
615 |
unit - Data unit.
|
616 |
Keywords:
|
617 |
scale - Scaling factor for histogram values.
|
618 |
"""
|
619 |
# Extract energy and offset angle vectors
|
620 |
energies = array.GetXaxis() |
621 |
offsets = array.GetYaxis() |
622 |
neng = energies.GetNbins() |
623 |
noffset = offsets.GetNbins() |
624 |
|
625 |
# Attach columns to HDU. We do this only if they do not yet
|
626 |
# exist. This allows adding columns to the same HDU in successive
|
627 |
# calls of this method
|
628 |
#
|
629 |
# ENERG_LO
|
630 |
if not hdu.hascolumn("ENERG_LO"): |
631 |
hdu.append_column(GFitsTableFloatCol("ENERG_LO", 1, neng)) |
632 |
hdu["ENERG_LO"].unit("TeV") |
633 |
for ieng in range(neng): |
634 |
e_lo = pow(10.0, energies.GetBinLowEdge(ieng+1)) |
635 |
hdu["ENERG_LO"][0,ieng] = e_lo |
636 |
#
|
637 |
# ENERG_HI
|
638 |
if not hdu.hascolumn("ENERG_HI"): |
639 |
hdu.append_column(GFitsTableFloatCol("ENERG_HI", 1, neng)) |
640 |
hdu["ENERG_HI"].unit("TeV") |
641 |
for ieng in range(neng): |
642 |
e_hi = pow(10.0, energies.GetBinUpEdge(ieng+1)) |
643 |
hdu["ENERG_HI"][0,ieng] = e_hi |
644 |
#
|
645 |
# THETA_LO
|
646 |
if not hdu.hascolumn("THETA_LO"): |
647 |
hdu.append_column(GFitsTableFloatCol("THETA_LO", 1, noffset)) |
648 |
hdu["THETA_LO"].unit("deg") |
649 |
for ioff in range(noffset): |
650 |
o_lo = offsets.GetBinLowEdge(ioff+1)
|
651 |
hdu["THETA_LO"][0,ioff] = o_lo |
652 |
|
653 |
#
|
654 |
# THETA_LO
|
655 |
if not hdu.hascolumn("THETA_HI"): |
656 |
hdu.append_column(GFitsTableFloatCol("THETA_HI", 1, noffset)) |
657 |
hdu["THETA_HI"].unit("deg") |
658 |
for ioff in range(noffset): |
659 |
o_hi = offsets.GetBinUpEdge(ioff+1)
|
660 |
hdu["THETA_HI"][0,ioff] = o_hi |
661 |
#
|
662 |
# "NAME"
|
663 |
if name != None and not hdu.hascolumn(name): |
664 |
hdu.append_column(GFitsTableFloatCol(name, 1, neng*noffset))
|
665 |
hdu[name].unit(unit) |
666 |
hdu[name].dim([neng, noffset]) |
667 |
for ioff in range(noffset): |
668 |
for ieng in range(neng): |
669 |
index = ieng + ioff * neng |
670 |
value = array.GetBinContent(ieng+1,ioff+1) |
671 |
hdu[name][0,index] = value * scale
|
672 |
|
673 |
# Collect boundary information
|
674 |
bd_eng = "ENERG(%.4f-%.2f)TeV" % \
|
675 |
(pow(10.0, energies.GetBinLowEdge(1)), \ |
676 |
pow(10.0, energies.GetBinUpEdge(neng))) |
677 |
bd_off = "THETA(%.2f-%.2f)deg" % \
|
678 |
(offsets.GetBinLowEdge(1), \
|
679 |
offsets.GetBinUpEdge(noffset)) |
680 |
bd_phi = "PHI(0-360)deg"
|
681 |
bounds = [bd_eng, bd_off, bd_phi] |
682 |
|
683 |
# Return boundary information
|
684 |
return bounds
|
685 |
|
686 |
def root2caldb(self, filename): |
687 |
"""
|
688 |
Translate ROOT to CALDB information.
|
689 |
|
690 |
Parameters:
|
691 |
filename - ROOT 2D performance filename.
|
692 |
"""
|
693 |
# Open ROOT performance file
|
694 |
file = TFile(filename) |
695 |
|
696 |
# Create effective area
|
697 |
self.root2ea(file) |
698 |
|
699 |
# Create point spread function
|
700 |
self.root2psf(file) |
701 |
|
702 |
# Create energy dispersion
|
703 |
self.root2edisp(file) |
704 |
|
705 |
# Create background
|
706 |
self.root2bgd(file) |
707 |
|
708 |
# Return
|
709 |
return
|
710 |
|
711 |
def root2ea(self, file): |
712 |
"""
|
713 |
Translate ROOT to CALDB effective area extension. The following ROOT
|
714 |
histograms are used:
|
715 |
|
716 |
EffectiveAreaEtrue_offaxis -> EFFAREA
|
717 |
EffectiveArea_offaxis -> EFFAREA_RECO
|
718 |
|
719 |
Parameters:
|
720 |
file - ROOT file.
|
721 |
Keywords:
|
722 |
None
|
723 |
"""
|
724 |
# Continue only if effective area HDU has been opened
|
725 |
if self.hdu_ea != None: |
726 |
|
727 |
# Allocate ROOT 2D arrays
|
728 |
etrue = TH2F() |
729 |
ereco = TH2F() |
730 |
file.GetObject("EffectiveAreaEtrue_offaxis", etrue) |
731 |
file.GetObject("EffectiveArea_offaxis", ereco) |
732 |
#print etrue.GetXaxis().GetBinLowEdge(1), etrue.GetXaxis().GetBinUpEdge(etrue.GetXaxis().GetNbins())
|
733 |
#print ereco.GetXaxis().GetBinLowEdge(1), ereco.GetXaxis().GetBinUpEdge(ereco.GetXaxis().GetNbins())
|
734 |
|
735 |
# Rebin etrue histogram
|
736 |
etrue.RebinX(10)
|
737 |
|
738 |
# Write boundaries (use Ereco boundaries)
|
739 |
bounds = self.make_2D(ereco, self.hdu_ea, None, "m2") |
740 |
for b in bounds: |
741 |
self.ea_bounds.append(b)
|
742 |
self.set_cif_keywords(self.hdu_ea, self.ea_name, \ |
743 |
self.ea_bounds, self.ea_desc) |
744 |
|
745 |
# EFFAREA
|
746 |
self.make_2D(etrue, self.hdu_ea, "EFFAREA", "m2") |
747 |
|
748 |
# EFFAREA_RECO
|
749 |
self.make_2D(ereco, self.hdu_ea, "EFFAREA_RECO", "m2") |
750 |
|
751 |
# Return
|
752 |
return
|
753 |
|
754 |
def root2psf(self, file): |
755 |
"""
|
756 |
Translate ROOT to CALDB point spread function extension. The following
|
757 |
ROOT histograms are used:
|
758 |
|
759 |
1/(2*pi*SIGMA_1) -> SCALE
|
760 |
AngRes_offaxis -> SIGMA_1 (scaling: 1/0.8)
|
761 |
0.0 -> AMPL_2
|
762 |
0.0 -> SIGMA_2
|
763 |
0.0 -> AMPL_3
|
764 |
0.0 -> SIGMA_3
|
765 |
|
766 |
Parameters:
|
767 |
file - ROOT file.
|
768 |
Keywords:
|
769 |
None
|
770 |
"""
|
771 |
# Continue only if point spread function HDU has been opened
|
772 |
if self.hdu_psf != None: |
773 |
|
774 |
# Allocate ROOT 2D array
|
775 |
r68 = TH2F() |
776 |
|
777 |
# Read 68% containment histogram
|
778 |
file.GetObject("AngRes_offaxis", r68) |
779 |
neng = r68.GetXaxis().GetNbins() |
780 |
noffset = r68.GetYaxis().GetNbins() |
781 |
|
782 |
# Compute scale histogram
|
783 |
scale = r68.Clone() |
784 |
for ioff in range(noffset): |
785 |
for ieng in range(neng): |
786 |
integral = 2.0 * math.pi * r68.GetBinContent(ieng+1,ioff+1) |
787 |
if integral > 0.0: |
788 |
value = 1.0 / integral
|
789 |
else:
|
790 |
value = 0.0
|
791 |
scale.SetBinContent(ieng+1,ioff+1,value) |
792 |
|
793 |
# Set zero histogram
|
794 |
zero = r68.Clone() |
795 |
for ioff in range(noffset): |
796 |
for ieng in range(neng): |
797 |
zero.SetBinContent(ieng+1,ioff+1,0.0) |
798 |
|
799 |
# Set boundaries
|
800 |
bounds = self.make_2D(r68, self.hdu_psf, None, "deg") |
801 |
for b in bounds: |
802 |
self.psf_bounds.append(b)
|
803 |
self.set_cif_keywords(self.hdu_psf, self.psf_name, \ |
804 |
self.psf_bounds, self.psf_desc) |
805 |
|
806 |
# SCALE
|
807 |
self.make_2D(scale, self.hdu_psf, "SCALE", "") |
808 |
|
809 |
# SIGMA_1
|
810 |
self.make_2D(r68, self.hdu_psf, "SIGMA_1", "deg") |
811 |
|
812 |
# AMPL_2
|
813 |
self.make_2D(zero, self.hdu_psf, "AMPL_2", "") |
814 |
|
815 |
# SIGMA_2
|
816 |
self.make_2D(zero, self.hdu_psf, "SIGMA_2", "deg") |
817 |
|
818 |
# AMPL_3
|
819 |
self.make_2D(zero, self.hdu_psf, "AMPL_3", "") |
820 |
|
821 |
# SIGMA_3
|
822 |
self.make_2D(zero, self.hdu_psf, "SIGMA_3", "deg") |
823 |
|
824 |
# Return
|
825 |
return
|
826 |
|
827 |
def root2edisp(self, file): |
828 |
"""
|
829 |
Translate ROOT to CALDB energy dispersion extension. The following ROOT
|
830 |
histograms are used:
|
831 |
|
832 |
ERes_offaxis -> ERES
|
833 |
Ebias_offaxis -> EBIAS
|
834 |
|
835 |
Parameters:
|
836 |
file - ROOT file.
|
837 |
Keywords:
|
838 |
None
|
839 |
"""
|
840 |
# Continue only if energy dispersion HDU has been opened
|
841 |
if self.hdu_edisp != None: |
842 |
|
843 |
# Allocate ROOT 2D array
|
844 |
eres = TH2F() |
845 |
ebias = TH2F() |
846 |
file.GetObject("ERes_offaxis", eres) |
847 |
file.GetObject("Ebias_offaxis", ebias) |
848 |
|
849 |
# Set boundaries
|
850 |
bounds = self.make_2D(eres, self.hdu_edisp, None, "deg") |
851 |
for b in bounds: |
852 |
self.edisp_bounds.append(b)
|
853 |
self.set_cif_keywords(self.hdu_edisp, self.edisp_name, \ |
854 |
self.edisp_bounds, self.edisp_desc) |
855 |
|
856 |
# ERES
|
857 |
self.make_2D(eres, self.hdu_edisp, "ERES", "Ereco/Etrue") |
858 |
|
859 |
# EBIAS
|
860 |
self.make_2D(ebias, self.hdu_edisp, "EBIAS", "Ereco/Etrue") |
861 |
|
862 |
# Return
|
863 |
return
|
864 |
|
865 |
def root2bgd(self, file): |
866 |
"""
|
867 |
Translate ROOT to CALDB background extension. The following ROOT
|
868 |
histograms are used:
|
869 |
|
870 |
BGRatePerSqDeg_offaxis -> BGD
|
871 |
BGRatePerSqDeg_offaxis -> BGD_RECO
|
872 |
|
873 |
Parameters:
|
874 |
file - ROOT file.
|
875 |
Keywords:
|
876 |
None
|
877 |
"""
|
878 |
# Continue only if background HDU has been opened
|
879 |
if self.hdu_bgd != None: |
880 |
|
881 |
# Allocate ROOT 2D array
|
882 |
array = TH2F() |
883 |
file.GetObject("BGRatePerSqDeg_offaxis", array) |
884 |
|
885 |
# Set boundaries
|
886 |
bounds = self.make_2D(array, self.hdu_bgd, None, "deg") |
887 |
for b in bounds: |
888 |
self.bgd_bounds.append(b)
|
889 |
self.set_cif_keywords(self.hdu_bgd, self.bgd_name, \ |
890 |
self.bgd_bounds, self.bgd_desc) |
891 |
|
892 |
# BGD
|
893 |
self.make_2D(array, self.hdu_bgd, "BGD", "") |
894 |
|
895 |
# BGD_RECO
|
896 |
self.make_2D(array, self.hdu_bgd, "BGD_RECO", "") |
897 |
|
898 |
# Return
|
899 |
return
|
900 |
|
901 |
def set_cif_keywords(self, hdu, name, bounds,desc): |
902 |
"""
|
903 |
Set standard CIF keywords for extension.
|
904 |
"""
|
905 |
# Set standard CIF keywords
|
906 |
hdu.card("CSYSNAME", "XMA_POL", "") |
907 |
hdu.card("CCLS0001", self.cal_class, "Calibration class") |
908 |
hdu.card("CDTP0001", self.cal_type, "Calibration type") |
909 |
hdu.card("CCNM0001", name, "Calibration name") |
910 |
|
911 |
# Set boundary keywords
|
912 |
for n in range(9): |
913 |
keyname = "CBD%d0001" % (n+1) |
914 |
if n >= len(bounds): |
915 |
value = "NONE"
|
916 |
else:
|
917 |
value = bounds[n] |
918 |
hdu.card(keyname, value, "Calibration boundary")
|
919 |
|
920 |
# Set validity keywords
|
921 |
hdu.card("CVSD0001", self.val_date, "Calibration validity start date (UTC)") |
922 |
hdu.card("CVST0001", self.val_time, "Calibration validity start time (UTC)") |
923 |
hdu.card("CDEC0001", desc, "Calibration description") |
924 |
|
925 |
# Return
|
926 |
return
|
927 |
|
928 |
|
929 |
#==========================#
|
930 |
# Main routine entry point #
|
931 |
#==========================#
|
932 |
if __name__ == '__main__':
    # This script translates a CTA ROOT 2D performance file into a CALDB
    # compliant CTA response. The script creates the response for 3
    # observations to illustrate the run-wise file structure.
    #
    # The CTA ROOT 2D performance files contain the following histograms:
    # - DiffSens_offaxis (2D)           - Differential sensitivity
    # - EffectiveArea_offaxis (2D)      - Effective area (full containment?, reco energy?)
    # - EffectiveArea80_offaxis (2D)    - Effective area for 80% source containment
    # - AngRes_offaxis (2D)             - Angular resolution 68% containment
    # - AngRes80_offaxis (2D)           - Angular resolution 80% containment
    # - BGRatePerSqDeg_offaxis (2D)     - Background rate per square degree
    # - BGRate_offaxis (2D)             - Background rate
    # - EffectiveAreaEtrue_offaxis (2D) - Effective area in true energy (finer bins)
    # - MigMatrix_offaxis (3D)
    # - EestOverEtrue_offaxis (3D)
    # - ERes_offaxis (2D)               - Energy Resolution
    # - Ebias_offaxis (2D)              - Energy bias

    # Set ROOT filename (hard-coded; adjust for your installation)
    path = "/Users/jurgen/Documents/Travail/Projects/CTA/WP-MC/root/IFAEOffaxisPerformanceBEI_May2012"
    irf = "SubarrayE_IFAE_50hours_20120510_offaxis.root"
    filename = path+"/"+irf

    # Set observation identifiers
    obsids = ["000001", "000002", "000003"]

    # Generate CALDB entries, one for each observation (or run)
    for obsid in obsids:

        # Allocate caldb
        db = caldb("E", obsid)

        # Open calibration
        db.open("test")

        # Translate ROOT to CALDB information
        db.root2caldb(filename)

        # Delete the CALDB entry explicitly so that its destructor flushes
        # and closes the files deterministically. Previously the last
        # observation was only closed during interpreter shutdown, where
        # __del__ execution is not guaranteed to run reliably
        del db