Diffstat (limited to 'hstwcs')
-rw-r--r--  hstwcs/__init__.py           180
-rw-r--r--  hstwcs/apply_corrections.py  140
-rw-r--r--  hstwcs/corrections.py        149
-rw-r--r--  hstwcs/dgeo.py               252
-rw-r--r--  hstwcs/makewcs.py            245
5 files changed, 966 insertions, 0 deletions
diff --git a/hstwcs/__init__.py b/hstwcs/__init__.py
new file mode 100644
index 0000000..efa9287
--- /dev/null
+++ b/hstwcs/__init__.py
@@ -0,0 +1,180 @@
+import os
+import pyfits
+#from .. wcsutil import HSTWCS
+from hstwcs.wcsutil import HSTWCS
+
+#from .. mappings import allowed_corrections
+from hstwcs import utils
+import corrections, makewcs
+import dgeo
+import time
+from pytools import parseinput, fileutil
+import apply_corrections
+
+#Note: The order of corrections is important
+
+__docformat__ = 'restructuredtext'
+
+
+def updatewcs(input, vacorr=True, tddcorr=True, dgeocorr=True, checkfiles=True):
+ """
+ Purpose
+ =======
+ Applies corrections to the WCS keywords.
+
+ Example
+ =======
+    >>> from hstwcs import updatewcs
+    >>> updatewcs(filename)
+
+ Dependencies
+ ============
+ `pytools`
+ `pyfits`
+ `pywcs`
+ `numpy`
+
+ :Parameters:
+    `input`: a Python list of file names or a string (wild card characters allowed)
+             input files may be in fits, geis or waiver fits format
+    `vacorr`: boolean
+              If True, velocity aberration correction will be applied
+    `tddcorr`: boolean
+              If True, time dependent distortion correction will be applied
+    `dgeocorr`: boolean
+              If True, lookup table (DGEO) distortion correction will be applied
+    `checkfiles`: boolean
+              If True, the format of the input files will be checked,
+              geis and waiver fits files will be converted to MEF format.
+              Default value is True for standalone mode.
+ """
+
+ files = parseinput.parseinput(input)[0]
+ if checkfiles:
+ files = checkFiles(files)
+ if not files:
+ print 'No valid input, quitting ...\n'
+ return
+ for f in files:
+ acorr = apply_corrections.setCorrections(f, vacorr=vacorr, tddcorr=tddcorr,dgeocorr=dgeocorr)
+ #restore the original WCS keywords
+ utils.restoreWCS(f)
+ makecorr(f, acorr)
+ return files
+
+def makecorr(fname, allowed_corr):
+ """
+ Purpose
+ =======
+ Applies corrections to the WCS of a single file
+
+ :Parameters:
+ `fname`: string
+ file name
+        `allowed_corr`: list
+                  list of corrections to be applied
+
+ """
+ f = pyfits.open(fname, mode='update')
+ #Determine the reference chip and make a copy of its restored header.
+ nrefchip, nrefext = getNrefchip(f)
+ primhdr = f[0].header
+ ref_hdr = f[nrefext].header.copy()
+ utils.write_archive(ref_hdr)
+
+ for extn in f:
+ # Perhaps all ext headers should be corrected (to be consistent)
+ if extn.header.has_key('extname') and extn.header['extname'].lower() == 'sci':
+ ref_wcs = HSTWCS(primhdr, ref_hdr, fobj=f)
+ ref_wcs.readModel(update=True, header=ref_hdr)
+ hdr = extn.header
+ ext_wcs = HSTWCS(primhdr, hdr, fobj=f)
+ utils.write_archive(hdr)
+ ext_wcs.readModel(update=True,header=hdr)
+ for c in allowed_corr:
+ if c != 'DGEOCorr':
+ corr_klass = corrections.__getattribute__(c)
+ kw2update = corr_klass.updateWCS(ext_wcs, ref_wcs)
+ for kw in kw2update:
+ hdr.update(kw, kw2update[kw])
+
+ if 'DGEOCorr' in allowed_corr:
+ kw2update = dgeo.DGEOCorr.updateWCS(f)
+ for kw in kw2update:
+ f[1].header.update(kw, kw2update[kw])
+
+ f.close()
+
+def getNrefchip(fobj):
+ """
+ This handles the fact that WFPC2 subarray observations
+ may not include chip 3 which is the default reference chip for
+ full observations. Also for subarrays chip 3 may not be the third
+ extension in a MEF file.
+ """
+ Nrefext = 1
+ instrument = fobj[0].header['INSTRUME']
+ if instrument == 'WFPC2':
+ detectors = [img.header['DETECTOR'] for img in fobj[1:]]
+
+ if 3 not in detectors:
+ Nrefchip=detectors[0]
+ Nrefext = 1
+ else:
+ Nrefchip = 3
+ Nrefext = detectors.index(3) + 1
+ elif instrument == 'ACS':
+ detector = fobj[0].header['DETECTOR']
+        if detector == 'WFC':
+            Nrefchip = 2
+ else:
+ Nrefchip = 1
+ elif instrument == 'NICMOS':
+ Nrefchip = fobj[0].header['CAMERA']
+ return Nrefchip, Nrefext
+
+def checkFiles(input):
+ """
+ Purpose
+ =======
+ Checks that input files are in the correct format.
+    Converts geis and waiver fits files to multiextension fits.
+ """
+ from pytools.check_files import geis2mef, waiver2mef
+ removed_files = []
+ newfiles = []
+ for file in input:
+ try:
+ imgfits,imgtype = fileutil.isFits(file)
+ except IOError:
+ print "Warning: File %s could not be found\n" %file
+ print "Removing file %s from input list" %file
+ removed_files.append(file)
+ continue
+        # Convert waiver FITS input to MEF; if the conversion fails,
+        # warn and drop the file from the input list.
+ if imgfits:
+ if imgtype == 'waiver':
+ newfilename = waiver2mef(file, convert_dq=True)
+                if newfilename is None:
+ print "Removing file %s from input list - could not convert waiver to mef" %file
+ removed_files.append(file)
+ else:
+ newfiles.append(newfilename)
+ else:
+ newfiles.append(file)
+
+ # If a GEIS image is provided as input, create a new MEF file with
+ # a name generated using 'buildFITSName()'
+ # Convert the corresponding data quality file if present
+ if not imgfits:
+ newfilename = geis2mef(file, convert_dq=True)
+            if newfilename is None:
+ print "Removing file %s from input list - could not convert geis to mef" %file
+ removed_files.append(file)
+ else:
+ newfiles.append(newfilename)
+ if removed_files:
+ print 'The following files will be removed from the list of files to be processed :\n'
+ for f in removed_files:
+ print f
+ return newfiles
+
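
For orientation, a minimal usage sketch of updatewcs() above; the file name is
hypothetical, and the call assumes pyfits, pywcs, numpy and pytools are importable
and that the image carries the usual HST calibration keywords (IDCTAB, DGEOFILE, ...):

    >>> from hstwcs import updatewcs
    >>> files = updatewcs('j94f05bgq_flt.fits', vacorr=True, tddcorr=True, dgeocorr=True)

updatewcs() returns the list of files it actually processed, after checkFiles()
has dropped missing files and converted geis/waiver input to MEF.
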
diff --git a/hstwcs/apply_corrections.py b/hstwcs/apply_corrections.py
new file mode 100644
index 0000000..9c7f22a
--- /dev/null
+++ b/hstwcs/apply_corrections.py
@@ -0,0 +1,140 @@
+import os
+import pyfits
+#import allowed_corrections
+import time
+from pytools import fileutil
+import os.path
+#Note: The order of corrections is important
+
+__docformat__ = 'restructuredtext'
+
+# A dictionary which lists the allowed corrections for each instrument.
+# These are also the default corrections applied in the pipeline.
+
+allowed_corrections={'WFPC2': ['MakeWCS','CompSIP', 'VACorr', 'DGEOCorr'],
+ 'ACS': ['TDDCorr', 'MakeWCS', 'CompSIP','VACorr', 'DGEOCorr']
+ }
+
+def setCorrections(fname, vacorr=True, tddcorr=True, dgeocorr=True):
+ """
+ Purpose
+ =======
+    Creates a list of corrections to be applied to a file,
+    based on user input parameters and the allowed corrections
+    for the instrument, defined in the allowed_corrections dictionary above.
+ """
+ instrument = pyfits.getval(fname, 'INSTRUME')
+ tddcorr = applyTDDCorr(fname, tddcorr)
+ dgeocorr = applyDgeoCorr(fname, dgeocorr)
+ # make a copy of this list !
+ acorr = allowed_corrections[instrument][:]
+ if 'VACorr' in acorr and vacorr==False: acorr.remove('VACorr')
+ if 'TDDCorr' in acorr and tddcorr==False: acorr.remove('TDDCorr')
+ if 'DGEOCorr' in acorr and dgeocorr==False: acorr.remove('DGEOCorr')
+ return acorr
+
+def applyTDDCorr(fname, utddcorr):
+ """
+ The default value of tddcorr for all ACS images is True.
+ This correction will be performed if all conditions below are True:
+ - the user did not turn it off on the command line
+ - the detector is WFC
+ - the idc table specified in the primary header is available.
+ """
+
+ instrument = pyfits.getval(fname, 'INSTRUME')
+ try:
+ detector = pyfits.getval(fname, 'DETECTOR')
+ except KeyError:
+ detector = None
+ try:
+ tddcorr = pyfits.getval(fname, 'TDDCORR')
+ if tddcorr == 'PERFORM':
+ tddcorr = True
+ else:
+ tddcorr = False
+ except KeyError:
+ tddcorr = None
+
+    if instrument == 'ACS' and detector == 'WFC' and tddcorr == True and utddcorr == True:
+        try:
+            idctab = pyfits.getval(fname, 'IDCTAB')
+        except KeyError:
+            tddcorr = False
+            #print "***IDCTAB keyword not found - not applying TDD correction***\n"
+        else:
+            if os.path.exists(fileutil.osfn(idctab)):
+                tddcorr = True
+            else:
+                tddcorr = False
+                #print "***IDCTAB file not found - not applying TDD correction***\n"
+    else:
+        tddcorr = False
+
+ return tddcorr
+
+def applyDgeoCorr(fname, udgeocorr):
+ """
+ Purpose
+ =======
+ Adds dgeo extensions to files based on the DGEOFILE keyword in the primary
+ header. This is a default correction and will always run in the pipeline.
+ The file used to generate the extensions is
+ recorded in the DGEOFILE keyword in each science extension.
+ If 'DGEOFILE' in the primary header is different from 'DGEOFILE' in the
+ extension header and the file exists on disk and is a 'new type' dgeofile,
+ then the dgeo extensions will be updated.
+ """
+ applyDGEOCorr = True
+ try:
+ # get DGEOFILE kw from primary header
+ fdgeo0 = pyfits.getval(fname, 'DGEOFILE')
+ if fdgeo0 == 'N/A':
+ return False
+ fdgeo0 = fileutil.osfn(fdgeo0)
+ if not fileutil.findFile(fdgeo0):
+ print 'Kw DGEOFILE exists in primary header but file %s not found\n' % fdgeo0
+ print 'DGEO correction will not be applied\n'
+ applyDGEOCorr = False
+ return applyDGEOCorr
+ try:
+ # get DGEOFILE kw from first extension header
+ fdgeo1 = pyfits.getval(fname, 'DGEOFILE', ext=1)
+ fdgeo1 = fileutil.osfn(fdgeo1)
+ if fdgeo1 and fileutil.findFile(fdgeo1):
+ if fdgeo0 != fdgeo1:
+ applyDGEOCorr = True
+ else:
+ applyDGEOCorr = False
+ else:
+ # dgeo file defined in first extension may not be found
+ # but if a valid kw exists in the primary header, dgeo should be applied.
+ applyDGEOCorr = True
+ except KeyError:
+ # the case of DGEOFILE kw present in primary header but missing
+ # in first extension header
+ applyDGEOCorr = True
+    except KeyError:
+        print 'DGEOFILE keyword not found in primary header'
+        return False
+
+    if isOldStyleDGEO(fname, fdgeo0):
+        applyDGEOCorr = False
+    return (applyDGEOCorr and udgeocorr)
+
+def isOldStyleDGEO(fname, dgname):
+ # checks if the file defined in a DGEOFILE kw is a full size
+ # (old style) image
+
+ sci_naxis1 = pyfits.getval(fname, 'NAXIS1', ext=1)
+ sci_naxis2 = pyfits.getval(fname, 'NAXIS2', ext=1)
+ dg_naxis1 = pyfits.getval(dgname, 'NAXIS1', ext=1)
+ dg_naxis2 = pyfits.getval(dgname, 'NAXIS2', ext=1)
+ if sci_naxis1 <= dg_naxis1 or sci_naxis2 <= dg_naxis2:
+ print 'Only full size (old style) XY file was found.'
+ print 'DGEO correction will not be applied.\n'
+ return True
+ else:
+ return False
+
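
A small illustration of the filtering done by setCorrections() above, without
touching any FITS file; the flags below are made up to show the effect of turning
one correction off:

    >>> from hstwcs.apply_corrections import allowed_corrections
    >>> acorr = allowed_corrections['ACS'][:]   # copy, so the module-level list stays intact
    >>> for name, flag in [('VACorr', False), ('TDDCorr', True), ('DGEOCorr', True)]:
    ...     if name in acorr and not flag:
    ...         acorr.remove(name)
    >>> acorr
    ['TDDCorr', 'MakeWCS', 'CompSIP', 'DGEOCorr']
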
diff --git a/hstwcs/corrections.py b/hstwcs/corrections.py
new file mode 100644
index 0000000..645e98d
--- /dev/null
+++ b/hstwcs/corrections.py
@@ -0,0 +1,149 @@
+import datetime
+import numpy
+from numpy import linalg
+from pytools import fileutil
+from hstwcs.utils import diff_angles
+import makewcs, dgeo
+
+MakeWCS = makewcs.MakeWCS
+DGEOCorr = dgeo.DGEOCorr
+
+class TDDCorr(object):
+ """
+ Purpose
+ =======
+ Apply time dependent distortion correction to SIP coefficients and basic
+ WCS keywords. It is applicable only to ACS/WFC data.
+
+ Ref: Jay Anderson, ACS ISR 2007-08, Variation of the Distortion
+ Solution of the WFC
+
+ :Parameters:
+ `ext_wcs`: HSTWCS object
+ An extension HSTWCS object to be modified
+ `ref_wcs`: HSTWCS object
+ A reference HSTWCS object
+ """
+
+ def updateWCS(cls, ext_wcs, ref_wcs):
+ """
+ - Calculates alpha and beta for ACS/WFC data.
+ - Writes 2 new kw to the extension header: TDDALPHA and TDDBETA
+ """
+
+ alpha, beta = cls.compute_alpha_beta(ext_wcs)
+ cls.apply_tdd2idc(ref_wcs, alpha, beta)
+ cls.apply_tdd2idc(ext_wcs, alpha, beta)
+ ext_wcs.idcmodel.refpix['TDDALPHA'] = alpha
+ ext_wcs.idcmodel.refpix['TDDBETA'] = beta
+ ref_wcs.idcmodel.refpix['TDDALPHA'] = alpha
+ ref_wcs.idcmodel.refpix['TDDBETA'] = beta
+
+ newkw = {'TDDALPHA': alpha, 'TDDBETA':beta, 'OCX10':ext_wcs.idcmodel.cx[1,0],
+ 'OCX11':ext_wcs.idcmodel.cx[1,1],'OCY10':ext_wcs.idcmodel.cy[1,0],
+ 'OCY11':ext_wcs.idcmodel.cy[1,1],}
+
+ return newkw
+ updateWCS = classmethod(updateWCS)
+
+ def apply_tdd2idc(cls, hwcs, alpha, beta):
+ """
+        Applies TDD to the idctab coefficients of an ACS/WFC observation.
+        This should always be the first correction.
+ """
+ theta_v2v3 = 2.234529
+ mrotp = fileutil.buildRotMatrix(theta_v2v3)
+ mrotn = fileutil.buildRotMatrix(-theta_v2v3)
+ tdd_mat = numpy.array([[1+(beta/2048.), alpha/2048.],[alpha/2048.,1-(beta/2048.)]],numpy.float64)
+ abmat1 = numpy.dot(tdd_mat, mrotn)
+ abmat2 = numpy.dot(mrotp,abmat1)
+ xshape, yshape = hwcs.idcmodel.cx.shape, hwcs.idcmodel.cy.shape
+ icxy = numpy.dot(abmat2,[hwcs.idcmodel.cx.ravel(), hwcs.idcmodel.cy.ravel()])
+ hwcs.idcmodel.cx = icxy[0]
+ hwcs.idcmodel.cy = icxy[1]
+ hwcs.idcmodel.cx.shape = xshape
+ hwcs.idcmodel.cy.shape = yshape
+
+ apply_tdd2idc = classmethod(apply_tdd2idc)
+
+ def compute_alpha_beta(cls, ext_wcs):
+ """
+ Compute the time dependent distortion skew terms
+ default date of 2004.5 = 2004-7-1
+ """
+ dday = 2004.5
+ year,month,day = ext_wcs.date_obs.split('-')
+ rdate = datetime.datetime(int(year),int(month),int(day))
+ rday = float(rdate.strftime("%j"))/365.25 + rdate.year
+ alpha = 0.095 + 0.090*(rday-dday)/2.5
+ beta = -0.029 - 0.030*(rday-dday)/2.5
+
+ return alpha, beta
+
+ compute_alpha_beta = classmethod(compute_alpha_beta)
+
+
+class VACorr(object):
+ """
+ Purpose
+ =======
+    Apply velocity aberration correction to WCS keywords.
+ Modifies the CD matrix and CRVAL1/2
+
+ """
+
+ def updateWCS(cls, ext_wcs, ref_wcs):
+ if ext_wcs.vafactor != 1:
+ ext_wcs.wcs.cd = ext_wcs.wcs.cd * ext_wcs.vafactor
+ ext_wcs.wcs.crval[0] = ref_wcs.wcs.crval[0] + ext_wcs.vafactor*diff_angles(ext_wcs.wcs.crval[0], ref_wcs.wcs.crval[0])
+ ext_wcs.wcs.crval[1] = ref_wcs.wcs.crval[1] + ext_wcs.vafactor*diff_angles(ext_wcs.wcs.crval[1], ref_wcs.wcs.crval[1])
+ ext_wcs.wcs.set()
+ else:
+ pass
+ kw2update={'CD1_1': ext_wcs.wcs.cd[0,0], 'CD1_2':ext_wcs.wcs.cd[0,1],
+ 'CD2_1':ext_wcs.wcs.cd[1,0], 'CD2_2':ext_wcs.wcs.cd[1,1],
+ 'CRVAL1':ext_wcs.wcs.crval[0], 'CRVAL2':ext_wcs.wcs.crval[1]}
+ return kw2update
+
+ updateWCS = classmethod(updateWCS)
+
+
+class CompSIP(object):
+ """
+ Purpose
+ =======
+ Compute SIP coefficients from idc table coefficients.
+ """
+
+ def updateWCS(cls, ext_wcs, ref_wcs):
+ kw2update = {}
+ order = ext_wcs.idcmodel.norder
+ kw2update['A_ORDER'] = order
+ kw2update['B_ORDER'] = order
+ pscale = ext_wcs.idcmodel.refpix['PSCALE']
+
+ cx = ext_wcs.idcmodel.cx
+ cy = ext_wcs.idcmodel.cy
+
+ matr = numpy.array([[cx[1,1],cx[1,0]], [cy[1,1],cy[1,0]]], dtype=numpy.float)
+ imatr = linalg.inv(matr)
+ akeys1 = numpy.zeros((order+1,order+1), dtype=numpy.float)
+ bkeys1 = numpy.zeros((order+1,order+1), dtype=numpy.float)
+ for n in range(order+1):
+ for m in range(order+1):
+ if n >= m and n>=2:
+ idcval = numpy.array([[cx[n,m]],[cy[n,m]]])
+ sipval = numpy.dot(imatr, idcval)
+ akeys1[m,n-m] = sipval[0]
+ bkeys1[m,n-m] = sipval[1]
+ Akey="A_%d_%d" % (m,n-m)
+ Bkey="B_%d_%d" % (m,n-m)
+ kw2update[Akey] = sipval[0,0]
+ kw2update[Bkey] = sipval[1,0]
+ kw2update['CTYPE1'] = 'RA---TAN-SIP'
+ kw2update['CTYPE2'] = 'DEC--TAN-SIP'
+ return kw2update
+
+ updateWCS = classmethod(updateWCS)
+
+
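
As a quick sanity check of TDDCorr.compute_alpha_beta() above: at the reference
epoch 2004-07-01 the (rday - dday) term is essentially zero, so alpha and beta
reduce to their constant terms. The snippet only repeats the arithmetic and needs
nothing beyond the standard library:

    >>> import datetime
    >>> rdate = datetime.datetime(2004, 7, 1)
    >>> rday = float(rdate.strftime("%j"))/365.25 + rdate.year
    >>> round(0.095 + 0.090*(rday - 2004.5)/2.5, 3)
    0.095
    >>> round(-0.029 - 0.030*(rday - 2004.5)/2.5, 3)
    -0.029
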
diff --git a/hstwcs/dgeo.py b/hstwcs/dgeo.py
new file mode 100644
index 0000000..70e6a98
--- /dev/null
+++ b/hstwcs/dgeo.py
@@ -0,0 +1,252 @@
+import pyfits
+from pytools import fileutil
+#from hstwcs.mappings import dgeo_vals
+import numpy
+
+class DGEOCorr(object):
+ """
+ Purpose
+ =======
+ Defines a Lookup table prior distortion correction as per WCS paper IV.
+ It uses a reference file defined by the DGEOFILE keyword in the primary header.
+
+ Algorithm
+ =========
+ - Using extensions in the reference file create a WCSDVARR extension
+ and add it to the file object.
+ - Add record-valued keywords which describe the lookup tables to the
+ science extension header
+ - Add a keyword 'DGEOFILE' to the science extension header, whose
+      value is the reference file used to create the WCSDVARR extension
+
+ If WCSDVARR extensions exist, subsequent updates will overwrite them.
+ If not, they will be added to the file object.
+
+ It is assumed that the DGEO reference files were created to work with IDC tables
+ but will be applied with SIP coefficients. A transformation is applied to correct
+ for the fact that the lookup tables will be applied before the first order coefficients
+ which are in the CD matrix when the SIP convention is used.
+ """
+
+ def updateWCS(cls, fobj):
+ """
+ :Parameters:
+ `fobj`: pyfits object
+ Science file, for which a distortion correction in a DGEOFILE is available
+
+ """
+ assert isinstance(fobj, pyfits.NP_pyfits.HDUList)
+ cls.applyDgeoCorr(fobj)
+ dgfile = fobj[0].header['DGEOFILE']
+
+ new_kw = {'DGEOFILE': dgfile}
+ return new_kw
+
+ updateWCS = classmethod(updateWCS)
+
+ def applyDgeoCorr(cls, fobj):
+ """
+ For each science extension in a pyfits file object:
+ - create a WCSDVARR extension
+ - update science header
+ - add/update DGEOFILE keyword
+ """
+ dgfile = fileutil.osfn(fobj[0].header['DGEOFILE'])
+ instrument = fobj[0].header.get('INSTRUME', None)
+ # Map WCSDVARR EXTVER numbers to extension numbers
+ wcsdvarr_ind = cls.getWCSIndex(fobj)
+ for ext in fobj:
+ try:
+ extname = ext.header['EXTNAME'].lower()
+ except KeyError:
+ continue
+ if extname == 'sci':
+ extversion = ext.header['EXTVER']
+ header = ext.header
+ # get the data arrays from the reference file and transform them for use with SIP
+ dx,dy = cls.getData(dgfile, extversion)
+ ndx, ndy = cls.transformData(header, dx,dy)
+ # determine EXTVER for the WCSDVARR extension from the DGEO file (EXTNAME, EXTVER) kw
+ wcsdvarr_x_version = 2 * extversion -1
+ wcsdvarr_y_version = 2 * extversion
+
+ for ename in zip(['DX', 'DY'], [wcsdvarr_x_version,wcsdvarr_y_version],[ndx, ndy]):
+ cls.addSciExtKw(header, wdvarr_ver=ename[1], dgeo_name=ename[0])
+ hdu = cls.createDgeoHDU(header, dgeofile=dgfile, wdvarr_ver=ename[1],dgeo_name=ename[0], data=ename[2],extver=extversion)
+ if wcsdvarr_ind:
+ fobj[wcsdvarr_ind[ename[1]]] = hdu
+ else:
+ fobj.append(hdu)
+
+
+ applyDgeoCorr = classmethod(applyDgeoCorr)
+
+ def getWCSIndex(cls, fobj):
+ """
+        If fobj has WCSDVARR extensions, returns a dictionary mapping
+        their EXTVER keywords to extension numbers.
+        If fobj does not have WCSDVARR extensions, an empty dictionary
+        is returned.
+ """
+ wcsd = {}
+ for e in range(len(fobj)):
+ try:
+ ename = fobj[e].header['EXTNAME']
+ except KeyError:
+ continue
+ if ename == 'WCSDVARR':
+ wcsd[fobj[e].header['EXTVER']] = e
+
+ return wcsd
+
+ getWCSIndex = classmethod(getWCSIndex)
+
+ def addSciExtKw(cls, hdr, wdvarr_ver=None, dgeo_name=None):
+ """
+ Adds kw to sci extension to define WCSDVARR lookup table extensions
+
+ """
+ if dgeo_name =='DX':
+ j=1
+ else:
+ j=2
+
+ cperror = 'CPERROR%s' %j
+ cpdis = 'CPDIS%s' %j
+ dpext = 'DP%s.' %j + 'EXTVER'
+ dpnaxes = 'DP%s.' %j +'NAXES'
+ dpaxis1 = 'DP%s.' %j+'AXIS.1'
+ dpaxis2 = 'DP%s.' %j+'AXIS.2'
+ keys = [cperror, cpdis, dpext, dpnaxes, dpaxis1, dpaxis2]
+ values = {cperror: 0.0, cpdis: 'Lookup', dpext: wdvarr_ver, dpnaxes: 2,
+ dpaxis1: 1, dpaxis2: 2}
+
+        comments = {cperror: 'Maximum error of dgeo correction for axis %s' % j,
+ cpdis: 'Prior distortion funcion type',
+ dpext: 'Version number of WCSDVARR extension containing lookup distortion table',
+ dpnaxes: 'Number of independent variables in distortion function',
+ dpaxis1: 'Axis number of the jth independent variable in a distortion function',
+ dpaxis2: 'Axis number of the jth independent variable in a distortion function'
+ }
+
+ for key in keys:
+ hdr.update(key=key, value=values[key], comment=comments[key], before='HISTORY')
+
+ addSciExtKw = classmethod(addSciExtKw)
+
+ def getData(cls,dgfile, extver):
+ """
+ Get the data arrays from the reference DGEO files
+ """
+ xdata = pyfits.getdata(dgfile, ext=('DX',extver))
+ ydata = pyfits.getdata(dgfile, ext=('DY',extver))
+ return xdata, ydata
+ getData = classmethod(getData)
+
+ def transformData(cls, header, dx, dy):
+ """
+ Transform the DGEO data arrays for use with SIP
+ """
+ coeffs = cls.getCoeffs(header)
+ idcscale = header['IDCSCALE']
+ sclcoeffs = numpy.linalg.inv(coeffs)/idcscale
+ ndx, ndy = numpy.dot(sclcoeffs, [dx.ravel(), dy.ravel()])
+ ndx.shape = dx.shape
+ ndy.shape=dy.shape
+ return ndx, ndy
+ transformData = classmethod(transformData)
+
+ def getCoeffs(cls, header):
+ """
+ Return a matrix of the first order IDC coefficients.
+ """
+ try:
+ ocx10 = header['OCX10']
+ ocx11 = header['OCX11']
+ ocy10 = header['OCY10']
+ ocy11 = header['OCY11']
+        except KeyError:
+ print 'First order IDCTAB coefficients are not available.\n'
+ print 'Cannot convert SIP to IDC coefficients.\n'
+ return None
+ return numpy.array([[ocx11, ocx10], [ocy11,ocy10]], dtype=numpy.float32)
+
+ getCoeffs = classmethod(getCoeffs)
+
+ def createDgeoHDU(cls, sciheader, dgeofile=None, wdvarr_ver=1, dgeo_name=None,data = None, extver=1):
+ """
+ Creates an HDU to be added to the file object.
+ """
+ hdr = cls.createDgeoHdr(sciheader, dgeofile=dgeofile, wdvarr_ver=wdvarr_ver, dgeoname=dgeo_name, extver=extver)
+ hdu=pyfits.ImageHDU(header=hdr, data=data)
+ return hdu
+
+ createDgeoHDU = classmethod(createDgeoHDU)
+
+ def createDgeoHdr(cls, sciheader, dgeofile, wdvarr_ver, dgeoname, extver):
+ """
+ Creates a header for the WCSDVARR extension based on the DGEO reference file
+ and sci extension header.
+ """
+ dgeo_header = pyfits.getheader(dgeofile, ext=(dgeoname,extver))
+ sci_naxis1 = sciheader['NAXIS1']
+ sci_naxis2 = sciheader['NAXIS2']
+ sci_crpix1 = sciheader['CRPIX1']
+ sci_crpix2 = sciheader['CRPIX2']
+
+ naxis1 = dgeo_header['naxis1']
+ naxis2 = dgeo_header['naxis2']
+ extver = dgeo_header['extver']
+ crpix1 = naxis1/2.
+ crpix2 = naxis2/2.
+        cdelt1 = float(sci_naxis1) / naxis1
+        cdelt2 = float(sci_naxis2) / naxis2
+ crval1 = sci_crpix1
+ crval2 = sci_crpix2
+ keys = ['XTENSION','BITPIX','NAXIS','NAXIS1','NAXIS2',
+ 'EXTNAME','EXTVER','PCOUNT','GCOUNT','CRPIX1',
+ 'CDELT1','CRVAL1','CRPIX2','CDELT2','CRVAL2']
+
+ comments = {'XTENSION': 'Image extension',
+ 'BITPIX': 'IEEE floating point',
+ 'NAXIS': 'Number of image columns',
+ 'NAXIS1': 'Number of image columns',
+ 'NAXIS2': 'Number of image rows',
+ 'EXTNAME': 'WCS distortion array',
+ 'EXTVER': 'Distortion array version number',
+ 'PCOUNT': 'Special data area of size 0',
+ 'GCOUNT': 'One data group',
+ 'CRPIX1': 'Distortion array reference pixel',
+ 'CDELT1': 'Grid step size in first coordinate',
+ 'CRVAL1': 'Image array pixel coordinate',
+ 'CRPIX2': 'Distortion array reference pixel',
+ 'CDELT2': 'Grid step size in second coordinate',
+ 'CRVAL2': 'Image array pixel coordinate'}
+
+ values = {'XTENSION': 'IMAGE',
+ 'BITPIX': -32,
+ 'NAXIS': 2,
+ 'NAXIS1': naxis1,
+ 'NAXIS2': naxis2,
+ 'EXTNAME': 'WCSDVARR',
+ 'EXTVER': wdvarr_ver,
+ 'PCOUNT': 0,
+ 'GCOUNT': 1,
+ 'CRPIX1': crpix1,
+ 'CDELT1': cdelt1,
+ 'CRVAL1': crval1,
+                  'CRPIX2': crpix2,
+ 'CDELT2': cdelt2,
+ 'CRVAL2': crval2
+ }
+
+
+ cdl = pyfits.CardList()
+ for c in keys:
+ cdl.append(pyfits.Card(key=c, value=values[c], comment=comments[c]))
+
+ hdr = pyfits.Header(cards=cdl)
+ return hdr
+
+ createDgeoHdr = classmethod(createDgeoHdr)
+
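
A hedged sketch of running only the lookup-table step above on an already opened
file, mirroring how makecorr() in __init__.py drives it; the file name is
hypothetical, and the SCI headers are assumed to carry the first order coefficients
(OCX10/OCX11/OCY10/OCY11) and IDCSCALE that getCoeffs() and transformData() read:

    >>> import pyfits
    >>> from hstwcs import dgeo
    >>> f = pyfits.open('j94f05bgq_flt.fits', mode='update')
    >>> kw2update = dgeo.DGEOCorr.updateWCS(f)   # adds or overwrites WCSDVARR extensions
    >>> for kw in kw2update:
    ...     f[1].header.update(kw, kw2update[kw])
    >>> f.close()
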
diff --git a/hstwcs/makewcs.py b/hstwcs/makewcs.py
new file mode 100644
index 0000000..40be750
--- /dev/null
+++ b/hstwcs/makewcs.py
@@ -0,0 +1,245 @@
+#from .. mappings import DEGTORAD, RADTODEG
+from hstwcs import DEGTORAD, RADTODEG
+import numpy
+from math import sin, sqrt, pow, cos, asin, atan2,pi
+from hstwcs import utils
+from pytools import fileutil
+
+class MakeWCS(object):
+ """
+ Purpose
+ =======
+ Recompute basic WCS keywords based on PA_V3 and distortion model.
+
+ Algorithm
+ =========
+ - update reference chip wcs
+
+ -- CRVAL: reference chip model zero point (XREF/YREF) on the sky
+ -- PA_V3 is calculated at the target position and adjusted
+ for each chip orientation
+       -- CD: PA_V3 and the model scale are used to construct a CD matrix
+
+ - update extension wcs
+
+       -- CRVAL: the distance between the zero points of the two
+          chip models on the sky
+       -- CD matrix: first order coefficients are added to the components
+          of this distance and transferred to the sky. The difference
+          between CRVAL and these vectors is the new CD matrix for each chip.
+ -- CRPIX: chip's model zero point in pixel space (XREF/YREF)
+
+ """
+ tdd_xyref = {1: [2048, 3072], 2:[2048, 1024]}
+ def updateWCS(cls, ext_wcs, ref_wcs):
+ """
+ recomputes the basic WCS kw
+ """
+ ltvoff, offshift = cls.getOffsets(ext_wcs)
+
+ v23_corr = cls.zero_point_corr(ext_wcs)
+ rv23_corr = cls.zero_point_corr(ref_wcs)
+
+ cls.uprefwcs(ext_wcs, ref_wcs, rv23_corr, ltvoff, offshift)
+ cls.upextwcs(ext_wcs, ref_wcs, v23_corr, rv23_corr, ltvoff, offshift)
+
+
+ kw2update = {'CD1_1': ext_wcs.wcs.cd[0,0],
+ 'CD1_2': ext_wcs.wcs.cd[0,1],
+ 'CD2_1': ext_wcs.wcs.cd[1,0],
+ 'CD2_2': ext_wcs.wcs.cd[1,1],
+ 'CRVAL1': ext_wcs.wcs.crval[0],
+ 'CRVAL2': ext_wcs.wcs.crval[1],
+ 'CRPIX1': ext_wcs.wcs.crpix[0],
+ 'CRPIX2': ext_wcs.wcs.crpix[1],
+ }
+ return kw2update
+
+ updateWCS = classmethod(updateWCS)
+
+ def upextwcs(cls, ext_wcs, ref_wcs, v23_corr, rv23_corr, loff, offsh):
+ """
+ updates an extension wcs
+ """
+ ltvoffx, ltvoffy = loff[0], loff[1]
+ offshiftx, offshifty = offsh[0], offsh[1]
+ ltv1 = ext_wcs.ltv1
+ ltv2 = ext_wcs.ltv2
+
+ if ltv1 != 0. or ltv2 != 0.:
+            offsetx = ext_wcs.wcs.crpix[0] - ltv1 - ext_wcs.idcmodel.refpix['XREF']
+            offsety = ext_wcs.wcs.crpix[1] - ltv2 - ext_wcs.idcmodel.refpix['YREF']
+ fx,fy = ext_wcs.idcmodel.shift(ext_wcs.idcmodel.cx,ext_wcs.idcmodel.cy,offsetx,offsety)
+ else:
+ fx, fy = ext_wcs.idcmodel.cx, ext_wcs.idcmodel.cy
+
+ tddscale = (ref_wcs.pscale/fx[1,1])
+ v2 = ext_wcs.idcmodel.refpix['V2REF'] + v23_corr[0,0] * tddscale
+ v3 = ext_wcs.idcmodel.refpix['V3REF'] - v23_corr[1,0] * tddscale
+ v2ref = ref_wcs.idcmodel.refpix['V2REF'] + rv23_corr[0,0] * tddscale
+ v3ref = ref_wcs.idcmodel.refpix['V3REF'] - rv23_corr[1,0] * tddscale
+
+ R_scale = ref_wcs.idcmodel.refpix['PSCALE']/3600.0
+ off = sqrt((v2-v2ref)**2 + (v3-v3ref)**2)/(R_scale*3600.0)
+
+ if v3 == v3ref:
+ theta=0.0
+ else:
+ theta = atan2(ext_wcs.parity[0][0]*(v2-v2ref), ext_wcs.parity[1][1]*(v3-v3ref))
+
+ if ref_wcs.idcmodel.refpix['THETA']: theta += ref_wcs.idcmodel.refpix['THETA']*pi/180.0
+
+ dX=(off*sin(theta)) + offshiftx
+ dY=(off*cos(theta)) + offshifty
+
+ px = numpy.array([[dX,dY]])
+ newcrval = ref_wcs.wcs.p2s_fits(px)['world'][0]
+ newcrpix = numpy.array([ext_wcs.idcmodel.refpix['XREF'] + ltvoffx,
+ ext_wcs.idcmodel.refpix['YREF'] + ltvoffy])
+ ext_wcs.wcs.crval = newcrval
+ ext_wcs.wcs.crpix = newcrpix
+ ext_wcs.wcs.set()
+
+ # Create a small vector, in reference image pixel scale
+ delmat = numpy.array([[fx[1,1], fy[1,1]], \
+ [fx[1,0], fy[1,0]]]) / R_scale/3600.
+
+        # Account for subarray offset
+        # Angle of the chip relative to the reference chip
+ if ext_wcs.idcmodel.refpix['THETA']:
+ dtheta = ext_wcs.idcmodel.refpix['THETA'] - ref_wcs.idcmodel.refpix['THETA']
+ else:
+ dtheta = 0.0
+ # Convert to radians
+ rr=dtheta*pi/180.0
+ rrmat = fileutil.buildRotMatrix(rr)
+
+ # Rotate the vectors
+ dxy = numpy.dot(rrmat, delmat)
+ wc = ref_wcs.wcs.p2s_fits(px + dxy)['world']
+
+ # Calculate the new CDs and convert to degrees
+ cd11 = utils.diff_angles(wc[0,0],newcrval[0])*cos(newcrval[1]*pi/180.0)
+ cd12 = utils.diff_angles(wc[1,0],newcrval[0])*cos(newcrval[1]*pi/180.0)
+ cd21 = utils.diff_angles(wc[0,1],newcrval[1])
+ cd22 = utils.diff_angles(wc[1,1],newcrval[1])
+ cd = numpy.array([[cd11, cd12], [cd21, cd22]])
+ ext_wcs.wcs.cd = cd
+ ext_wcs.wcs.set()
+
+ upextwcs = classmethod(upextwcs)
+
+ def uprefwcs(cls, ext_wcs, ref_wcs, rv23_corr_tdd, loff, offsh):
+ """
+ Updates the reference chip
+ """
+ ltvoffx, ltvoffy = loff[0], loff[1]
+ offshift = offsh
+ dec = ref_wcs.wcs.crval[1]
+ tddscale = (ref_wcs.pscale/ext_wcs.idcmodel.cx[1,1])
+ rv23 = [ref_wcs.idcmodel.refpix['V2REF'] + (rv23_corr_tdd[0,0] *tddscale),
+ ref_wcs.idcmodel.refpix['V3REF'] - (rv23_corr_tdd[1,0] * tddscale)]
+ # Get an approximate reference position on the sky
+ rref = numpy.array([[ref_wcs.idcmodel.refpix['XREF']+ltvoffx ,
+ ref_wcs.idcmodel.refpix['YREF']+ltvoffy]])
+
+ crval = ref_wcs.wcs.p2s_fits(rref)['world'][0]
+ # Convert the PA_V3 orientation to the orientation at the aperture
+ # This is for the reference chip only - we use this for the
+ # reference tangent plane definition
+ # It has the same orientation as the reference chip
+ pv = troll(ext_wcs.pav3,dec,rv23[0], rv23[1])
+ # Add the chip rotation angle
+ if ref_wcs.idcmodel.refpix['THETA']:
+ pv += ref_wcs.idcmodel.refpix['THETA']
+
+
+ # Set values for the rest of the reference WCS
+ ref_wcs.wcs.crval = crval
+ ref_wcs.wcs.crpix = numpy.array([0.0,0.0])+offsh
+ parity = ref_wcs.parity
+ R_scale = ref_wcs.idcmodel.refpix['PSCALE']/3600.0
+ cd11 = parity[0][0] * cos(pv*pi/180.0)*R_scale
+ cd12 = parity[0][0] * -sin(pv*pi/180.0)*R_scale
+ cd21 = parity[1][1] * sin(pv*pi/180.0)*R_scale
+ cd22 = parity[1][1] * cos(pv*pi/180.0)*R_scale
+
+ rcd = numpy.array([[cd11, cd12], [cd21, cd22]])
+ ref_wcs.wcs.cd = rcd
+ ref_wcs.wcs.set()
+
+ uprefwcs = classmethod(uprefwcs)
+
+ def zero_point_corr(cls,hwcs):
+ try:
+ alpha = hwcs.idcmodel.refpix['TDDALPHA']
+ beta = hwcs.idcmodel.refpix['TDDBETA']
+ except KeyError:
+ return numpy.array([[0., 0.],[0.,0.]])
+
+ tdd = numpy.array([[beta, alpha], [alpha, -beta]])
+ mrotp = fileutil.buildRotMatrix(2.234529)/2048.
+ xy0 = numpy.array([[cls.tdd_xyref[hwcs.chip][0]-2048.], [cls.tdd_xyref[hwcs.chip][1]-2048.]])
+ v23_corr = numpy.dot(mrotp,numpy.dot(tdd,xy0)) * 0.05
+
+ return v23_corr
+
+ zero_point_corr = classmethod(zero_point_corr)
+
+ def getOffsets(cls, ext_wcs):
+ ltv1 = ext_wcs.ltv1
+ ltv2 = ext_wcs.ltv2
+
+ offsetx = ext_wcs.wcs.crpix[0] - ltv1 - ext_wcs.idcmodel.refpix['XREF']
+ offsety = ext_wcs.wcs.crpix[1] - ltv2 - ext_wcs.idcmodel.refpix['YREF']
+
+ shiftx = ext_wcs.idcmodel.refpix['XREF'] + ltv1
+ shifty = ext_wcs.idcmodel.refpix['YREF'] + ltv2
+ if ltv1 != 0. or ltv2 != 0.:
+ ltvoffx = ltv1 + offsetx
+ ltvoffy = ltv2 + offsety
+ offshiftx = offsetx + shiftx
+ offshifty = offsety + shifty
+ else:
+ ltvoffx = 0.
+ ltvoffy = 0.
+ offshiftx = 0.
+ offshifty = 0.
+
+ ltvoff = numpy.array([ltvoffx, ltvoffy])
+ offshift = numpy.array([offshiftx, offshifty])
+ return ltvoff, offshift
+
+ getOffsets = classmethod(getOffsets)
+
+
+def troll(roll, dec, v2, v3):
+ """ Computes the roll angle at the target position based on:
+ the roll angle at the V1 axis(roll),
+ the dec of the target(dec), and
+ the V2/V3 position of the aperture (v2,v3) in arcseconds.
+
+ Based on the algorithm provided by Colin Cox that is used in
+ Generic Conversion at STScI.
+ """
+ # Convert all angles to radians
+ _roll = DEGTORAD(roll)
+ _dec = DEGTORAD(dec)
+ _v2 = DEGTORAD(v2 / 3600.)
+ _v3 = DEGTORAD(v3 / 3600.)
+
+ # compute components
+ sin_rho = sqrt((pow(sin(_v2),2)+pow(sin(_v3),2)) - (pow(sin(_v2),2)*pow(sin(_v3),2)))
+ rho = asin(sin_rho)
+ beta = asin(sin(_v3)/sin_rho)
+ if _v2 < 0: beta = pi - beta
+ gamma = asin(sin(_v2)/sin_rho)
+ if _v3 < 0: gamma = pi - gamma
+ A = pi/2. + _roll - beta
+ B = atan2( sin(A)*cos(_dec), (sin(_dec)*sin_rho - cos(_dec)*cos(rho)*cos(A)))
+
+ # compute final value
+ troll = RADTODEG(pi - (gamma+B))
+
+ return troll
+
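
A note on troll(): with the aperture exactly on the V1 axis (v2 = v3 = 0) sin_rho
is zero and the subsequent divisions fail, so any quick check should use an
off-axis aperture position. The numbers below are illustrative only:

    >>> # hypothetical PA_V3, target declination and V2/V3 aperture position (arcsec)
    >>> pa_v3, dec, v2, v3 = 80.0, -30.0, 256.6, 302.2
    >>> roll_at_aperture = troll(pa_v3, dec, v2, v3)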