summaryrefslogtreecommitdiff
path: root/lib/stwcs/updatewcs
diff options
context:
space:
mode:
Diffstat (limited to 'lib/stwcs/updatewcs')
-rw-r--r--lib/stwcs/updatewcs/__init__.py396
-rw-r--r--lib/stwcs/updatewcs/apply_corrections.py223
-rw-r--r--lib/stwcs/updatewcs/corrections.py230
-rw-r--r--lib/stwcs/updatewcs/det2im.py200
-rw-r--r--lib/stwcs/updatewcs/makewcs.py251
-rw-r--r--lib/stwcs/updatewcs/npol.py319
-rw-r--r--lib/stwcs/updatewcs/utils.py28
7 files changed, 1647 insertions, 0 deletions
diff --git a/lib/stwcs/updatewcs/__init__.py b/lib/stwcs/updatewcs/__init__.py
new file mode 100644
index 0000000..53e1c21
--- /dev/null
+++ b/lib/stwcs/updatewcs/__init__.py
@@ -0,0 +1,396 @@
+from __future__ import division # confidence high
+
+import os
+import pyfits
+import numpy as np
+from stwcs import wcsutil
+from stwcs.wcsutil import HSTWCS
+from stwcs import __version__ as stwcsversion
+import pywcs
+
+import utils, corrections, makewcs
+import npol, det2im
+from stsci.tools import parseinput, fileutil
+import apply_corrections
+
+import time
+import logging
+logger = logging.getLogger('stwcs.updatewcs')
+
+import atexit
+atexit.register(logging.shutdown)
+
+#Note: The order of corrections is important
+
+__docformat__ = 'restructuredtext'
+
+def updatewcs(input, vacorr=True, tddcorr=True, npolcorr=True, d2imcorr=True,
+ checkfiles=True, wcskey=" ", wcsname=" ", clobber=False, verbose=False):
+ """
+
+ Updates HST science files with the best available calibration information.
+ This allows users to retrieve from the archive self contained science files
+ which do not require additional reference files.
+
+ Basic WCS keywords are updated in the process and new keywords (following WCS
+ Paper IV and the SIP convention) as well as new extensions are added to the science files.
+
+
+ Example
+ -------
+ >>>from stwcs import updatewcs
+ >>>updatewcs.updatewcs(filename)
+
+ Dependencies
+ ------------
+ `stsci.tools`
+ `pyfits`
+ `pywcs`
+
+ Parameters
+ ----------
+ input: a python list of file names or a string (wild card characters allowed)
+ input files may be in fits, geis or waiver fits format
+ vacorr: boolean
+ If True, vecocity aberration correction will be applied
+ tddcorr: boolean
+ If True, time dependent distortion correction will be applied
+ npolcorr: boolean
+ If True, a Lookup table distortion will be applied
+ d2imcorr: boolean
+ If True, detector to image correction will be applied
+ checkfiles: boolean
+ If True, the format of the input files will be checked,
+ geis and waiver fits files will be converted to MEF format.
+ Default value is True for standalone mode.
+ wcskey: None, one character string A-Z or an empty string of length 1
+ If None - the primary WCS is not archived
+ If an empty string - the next available wcskey is used for the archive
+ A-Z - use this key to archive the WCS
+ wcsname: a string
+ The name under which the primary WCS is archived after it is updated.
+ If an empty string (default), the name of the idctable is used as
+ a base.
+ clobber: boolean
+ a flag for reusing the wcskey when archiving the primary WCS
+ """
+ if verbose == False:
+ logger.setLevel(100)
+ else:
+ formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+ log_filename = 'stwcs.log'
+ fh = logging.FileHandler(log_filename, mode='w')
+ fh.setLevel(logging.DEBUG)
+ fh.setFormatter(formatter)
+ logger.addHandler(fh)
+ logger.setLevel(verbose)
+ args = "vacorr=%s, tddcorr=%s, npolcorr=%s, d2imcorr=%s, checkfiles=%s, \
+ wcskey=%s, wcsname=%s, clobber=%s" % (str(vacorr), str(tddcorr), str(npolcorr),
+ str(d2imcorr), str(checkfiles), str(wcskey),
+ str(wcsname), str(clobber))
+ logger.info('\n\tStarting UPDATEWCS: %s', time.asctime())
+
+ files = parseinput.parseinput(input)[0]
+ logger.info("\n\tInput files: %s, " % [i for i in files])
+ logger.info("\n\tInput arguments: %s" %args)
+ if checkfiles:
+ files = checkFiles(files)
+ if not files:
+ print 'No valid input, quitting ...\n'
+ return
+
+ for f in files:
+ acorr = apply_corrections.setCorrections(f, vacorr=vacorr, \
+ tddcorr=tddcorr,npolcorr=npolcorr, d2imcorr=d2imcorr)
+ if 'MakeWCS' in acorr and newIDCTAB(f):
+ logger.warning("\n\tNew IDCTAB file detected. All current WCSs will be deleted")
+ cleanWCS(f)
+
+ #restore the original WCS keywords
+ makecorr(f, acorr, wkey=wcskey, wname=wcsname, clobber=False)
+ return files
+
+def makecorr(fname, allowed_corr, wkey=" ", wname=" ", clobber=False):
+    """
+    Purpose
+    =======
+    Applies corrections to the WCS of a single file
+
+    :Parameters:
+    `fname`: string
+        file name
+    `allowed_corr`: list
+        list of corrections to be applied
+    `wkey`: None, one character string A-Z or an empty string of length 1
+        If None - the primary WCS is not archived
+        If an empty string - the next available wcskey is used for the archive
+        A-Z - use this key to archive the WCS
+    `wname`: a string
+        The name under which the primary WCS is archived after it is updated.
+        If an empty string (default), the name of the idctable is used as
+        a base.
+    `clobber`: boolean
+        a flag for reusing the wcskey when archiving the primary WCS
+        (NOTE(review): currently not forwarded to the archiveWCS calls
+        below, which always pass clobber=True - confirm intent)
+    """
+    f = pyfits.open(fname, mode='update')
+    #restore the original WCS keywords
+    #wcsutil.restoreWCS(f, ext=[], wcskey='O', clobber=True)
+    #Determine the reference chip and create the reference HSTWCS object
+    nrefchip, nrefext = getNrefchip(f)
+    wcsutil.restoreWCS(f, nrefext, wcskey='O', clobber=True)
+    rwcs = HSTWCS(fobj=f, ext=nrefext)
+    rwcs.readModel(update=True,header=f[nrefext].header)
+
+    # Re-archive the restored original WCS under key 'O' / name 'OPUS'.
+    wcsutil.archiveWCS(f, nrefext, 'O', wcsname='OPUS', clobber=True)
+
+    # The detector-to-image correction operates on the whole file (it adds
+    # its own extensions), so it is applied once, before the per-extension
+    # corrections below.
+    if 'DET2IMCorr' in allowed_corr:
+        det2im.DET2IMCorr.updateWCS(f)
+
+    # get a wcskey and wcsname from the first extension header
+    idcname = fileutil.osfn(rwcs.idctab)
+    key, name = getKeyName(f[1].header, wkey, wname, idcname)
+
+    for i in range(len(f))[1:]:
+        extn = f[i]
+
+        if extn.header.has_key('extname'):
+            extname = extn.header['extname'].lower()
+            if extname == 'sci':
+                wcsutil.restoreWCS(f, ext=i, wcskey='O', clobber=True)
+                sciextver = extn.header['extver']
+                ref_wcs = rwcs.deepcopy()
+                hdr = extn.header
+                ext_wcs = HSTWCS(fobj=f, ext=i)
+                wcsutil.archiveWCS(f, ext=i, wcskey="O", wcsname="OPUS", clobber=True)
+                ext_wcs.readModel(update=True,header=hdr)
+                for c in allowed_corr:
+                    # NPOLCorr and DET2IMCorr are whole-file corrections,
+                    # handled outside this per-SCI-extension loop.
+                    if c != 'NPOLCorr' and c != 'DET2IMCorr':
+                        corr_klass = corrections.__getattribute__(c)
+                        kw2update = corr_klass.updateWCS(ext_wcs, ref_wcs)
+                        for kw in kw2update:
+                            hdr.update(kw, kw2update[kw])
+                #if wkey is None, do not archive the primary WCS
+                if key is not None:
+                    wcsutil.archiveWCS(f, ext=i, wcskey=key, wcsname=name, clobber=True)
+            elif extname in ['err', 'dq', 'sdq', 'samp', 'time']:
+                cextver = extn.header['extver']
+                # NOTE(review): assumes the SCI extension with this EXTVER
+                # appears before its companion extensions so that sciextver
+                # is already bound - confirm extension ordering guarantees.
+                if cextver == sciextver:
+                    # Propagate the updated SCI WCS to the companion extension.
+                    hdr = f[('SCI',sciextver)].header
+                    w = pywcs.WCS(hdr, f)
+                    copyWCS(w, extn.header, key, name)
+            else:
+                continue
+
+    if 'NPOLCorr' in allowed_corr:
+        kw2update = npol.NPOLCorr.updateWCS(f)
+        for kw in kw2update:
+            f[1].header.update(kw, kw2update[kw])
+    # Finally record the version of the software which updated the WCS
+    if f[0].header.has_key('HISTORY'):
+        f[0].header.update(key='UPWCSVER', value=stwcsversion,
+                           comment="Version of STWCS used to updated the WCS", before='HISTORY')
+    elif f[0].header.has_key('ASN_MTYP'):
+        f[0].header.update(key='UPWCSVER', value=stwcsversion,
+                           comment="Version of STWCS used to updated the WCS", after='ASN_MTYP')
+    else:
+        # Find index of last non-blank card, and insert this new keyword after that card
+        for i in range(len(f[0].header.ascard)-1,0,-1):
+            if f[0].header[i].strip() != '': break
+
+        f[0].header.update(key='UPWCSVER', value=stwcsversion,
+                           comment="Version of STWCS used to updated the WCS",after=i)
+    f.close()
+
+def getKeyName(hdr, wkey, wname, idcname):
+ if wkey is not None: # archive the primary WCS
+ if wkey == " ":
+ if wname == " " :
+ # get the next available key and use the IDCTABLE name as WCSNAME
+ idcname = os.path.split(idcname)[1]
+ name = ''.join(['IDC_',idcname.split('_idc.fits')[0]])
+ key = wcsutil.getKeyFromName(hdr, name)
+ if not key:
+ key = wcsutil.next_wcskey(hdr)
+ else:
+ #try to get a key from WCSNAME
+ # if not - get the next availabble key
+ name = wname
+ key = wcsutil.getKeyFromName(hdr, wname)
+ if not key:
+ key = wcsutil.next_wcskey(hdr)
+ else:
+ key = wkey
+ name = wname
+ return key, name
+
+def copyWCS(w, hdr, wkey, wname):
+ """
+ This is a convenience function to copy a WCS object
+ to a header as a primary WCS. It is used only to copy the
+ WCS of the 'SCI' extension to the headers of 'ERR', 'DQ', 'SDQ',
+ 'TIME' or 'SAMP' extensions.
+ """
+ hwcs = w.to_header()
+
+ if w.wcs.has_cd():
+ wcsutil.pc2cd(hwcs)
+ for k in hwcs.keys():
+ key = k[:7] + wkey
+ hdr.update(key=key, value=hwcs[k])
+ norient = np.rad2deg(np.arctan2(hwcs['CD1_2'],hwcs['CD2_2']))
+ okey = 'ORIENT%s' % wkey
+ hdr.update(key=okey, value=norient)
+
+def getNrefchip(fobj):
+ """
+
+ Finds which FITS extension holds the reference chip.
+
+ The reference chip has EXTNAME='SCI', can be in any extension and
+ is instrument specific. This functions provides mappings between
+ sci extensions, chips and fits extensions.
+ In the case of a subarray when the reference chip is missing, the
+ first 'SCI' extension is the reference chip.
+
+ Parameters
+ ----------
+ fobj: pyfits HDUList object
+ """
+ nrefext = 1
+ nrefchip = 1
+ instrument = fobj[0].header['INSTRUME']
+
+ if instrument == 'WFPC2':
+ chipkw = 'DETECTOR'
+ extvers = [("SCI",img.header['EXTVER']) for img in
+ fobj[1:] if img.header['EXTNAME'].lower()=='sci']
+ detectors = [img.header[chipkw] for img in fobj[1:] if
+ img.header['EXTNAME'].lower()=='sci']
+ fitsext = [i for i in range(len(fobj))[1:] if
+ fobj[i].header['EXTNAME'].lower()=='sci']
+ det2ext=dict(map(None, detectors,extvers))
+ ext2det=dict(map(None, extvers, detectors))
+ ext2fitsext=dict(map(None, extvers, fitsext))
+
+ if 3 not in detectors:
+ nrefchip = ext2det.pop(extvers[0])
+ nrefext = ext2fitsext.pop(extvers[0])
+ else:
+ nrefchip = 3
+ extname = det2ext.pop(nrefchip)
+ nrefext = ext2fitsext.pop(extname)
+
+ elif (instrument == 'ACS' and fobj[0].header['DETECTOR'] == 'WFC') or \
+ (instrument == 'WFC3' and fobj[0].header['DETECTOR'] == 'UVIS'):
+ chipkw = 'CCDCHIP'
+ extvers = [("SCI",img.header['EXTVER']) for img in
+ fobj[1:] if img.header['EXTNAME'].lower()=='sci']
+ detectors = [img.header[chipkw] for img in fobj[1:] if
+ img.header['EXTNAME'].lower()=='sci']
+ fitsext = [i for i in range(len(fobj))[1:] if
+ fobj[i].header['EXTNAME'].lower()=='sci']
+ det2ext=dict(map(None, detectors,extvers))
+ ext2det=dict(map(None, extvers, detectors))
+ ext2fitsext=dict(map(None, extvers, fitsext))
+
+ if 2 not in detectors:
+ nrefchip = ext2det.pop(extvers[0])
+ nrefext = ext2fitsext.pop(extvers[0])
+ else:
+ nrefchip = 2
+ extname = det2ext.pop(nrefchip)
+ nrefext = ext2fitsext.pop(extname)
+ else:
+ for i in range(len(fobj)):
+ extname = fobj[i].header.get('EXTNAME', None)
+ if extname != None and extname.lower == 'sci':
+ nrefext = i
+ break
+
+ return nrefchip, nrefext
+
+def checkFiles(input):
+    """
+    Checks that input files are in the correct format.
+    Converts geis and waiver fits files to multiextension fits.
+
+    Returns the (possibly converted) list of file names that passed the
+    check; files that could not be found or converted are dropped with
+    a logged warning.
+    """
+    from stsci.tools.check_files import geis2mef, waiver2mef, checkFiles
+    logger.info("\n\tChecking files %s" % input)
+    removed_files = []
+    newfiles = []
+    if not isinstance(input, list):
+        input=[input]
+
+    # NOTE: 'file' shadows the builtin of the same name (harmless here).
+    for file in input:
+        try:
+            imgfits,imgtype = fileutil.isFits(file)
+        except IOError:
+            logger.warning( "\n\tFile %s could not be found, removing it from the input list.\n" %file)
+            removed_files.append(file)
+            continue
+        # Check for existence of waiver FITS input, and quit if found.
+        # Or should we print a warning and continue but not use that file
+        if imgfits:
+            if imgtype == 'waiver':
+                newfilename = waiver2mef(file, convert_dq=True)
+                if newfilename == None:
+                    logger.warning("\n\tRemoving file %s from input list - could not convert waiver to mef" %file)
+                    removed_files.append(file)
+                else:
+                    newfiles.append(newfilename)
+            else:
+                newfiles.append(file)
+
+        # If a GEIS image is provided as input, create a new MEF file with
+        # a name generated using 'buildFITSName()'
+        # Convert the corresponding data quality file if present
+        if not imgfits:
+            newfilename = geis2mef(file, convert_dq=True)
+            if newfilename == None:
+                logger.warning("\n\tRemoving file %s from input list - could not convert geis to mef" %file)
+                removed_files.append(file)
+            else:
+                newfiles.append(newfilename)
+    if removed_files:
+        logger.warning('\n\tThe following files will be removed from the list of files to be processed %s' % removed_files)
+        #for f in removed_files:
+        #    print f
+
+    # Final format/consistency check is delegated to stsci.tools.check_files
+    # (the imported checkFiles, which shadows this function's name locally).
+    newfiles = checkFiles(newfiles)[0]
+    logger.info("\n\tThese files passed the input check and will be processed: %s" % newfiles)
+    return newfiles
+
+def newIDCTAB(fname):
+ #When this is called we know there's a kw IDCTAB in the header
+ idctab = fileutil.osfn(pyfits.getval(fname, 'IDCTAB'))
+ try:
+ #check for the presence of IDCTAB in the first extension
+ oldidctab = fileutil.osfn(pyfits.getval(fname, 'IDCTAB', ext=1))
+ except KeyError:
+ return False
+ if idctab == oldidctab:
+ return False
+ else:
+ return True
+
+def cleanWCS(fname):
+ # A new IDCTAB means all previously computed WCS's are invalid
+ # We are deleting all of them except the original OPUS WCS.nvalidates all WCS's.
+ keys = wcsutil.wcskeys(pyfits.getheader(fname, ext=1))
+ f = pyfits.open(fname, mode='update')
+ fext = range(len(f))
+ for key in keys:
+ wcsutil.deleteWCS(fname, ext=fext,wcskey=key)
+
+def getCorrections(instrument):
+ """
+ Print corrections available for an instrument
+
+ :Parameters:
+ `instrument`: string, one of 'WFPC2', 'NICMOS', 'STIS', 'ACS', 'WFC3'
+ """
+ acorr = apply_corrections.allowed_corrections[instrument]
+
+ print "The following corrections will be performed for instrument %s\n" % instrument
+ for c in acorr: print c,': ' , apply_corrections.cnames[c]
+
diff --git a/lib/stwcs/updatewcs/apply_corrections.py b/lib/stwcs/updatewcs/apply_corrections.py
new file mode 100644
index 0000000..fbcb502
--- /dev/null
+++ b/lib/stwcs/updatewcs/apply_corrections.py
@@ -0,0 +1,223 @@
+from __future__ import division # confidence high
+
+import os
+import pyfits
+import time
+from stsci.tools import fileutil
+import os.path
+from stwcs.wcsutil import altwcs
+
+import logging
+logger = logging.getLogger("stwcs.updatewcs.apply_corrections")
+
+#Note: The order of corrections is important
+
+__docformat__ = 'restructuredtext'
+
+# A dictionary which lists the allowed corrections for each instrument.
+# These are the default corrections applied also in the pipeline.
+
+allowed_corrections={'WFPC2': ['DET2IMCorr', 'MakeWCS','CompSIP', 'VACorr'],
+ 'ACS': ['DET2IMCorr', 'TDDCorr', 'MakeWCS', 'CompSIP','VACorr', 'NPOLCorr'],
+ 'STIS': ['MakeWCS', 'CompSIP','VACorr'],
+ 'NICMOS': ['MakeWCS', 'CompSIP','VACorr'],
+ 'WFC3': ['MakeWCS', 'CompSIP','VACorr'],
+ }
+
+cnames = {'DET2IMCorr': 'Detector to Image Correction',
+ 'TDDCorr': 'Time Dependent Distortion Correction',
+ 'MakeWCS': 'Recalculate basic WCS keywords based on the distortion model',
+ 'CompSIP': 'Given IDCTAB distortion model calculate the SIP coefficients',
+ 'VACorr': 'Velocity Aberration Correction',
+ 'NPOLCorr': 'Lookup Table Distortion'
+ }
+
+def setCorrections(fname, vacorr=True, tddcorr=True, npolcorr=True, d2imcorr=True):
+    """
+    Creates a list of corrections to be applied to a file
+    based on user input parameters and allowed corrections
+    for the instrument.
+    """
+    instrument = pyfits.getval(fname, 'INSTRUME')
+    # make a copy of this list !
+    acorr = allowed_corrections[instrument][:]
+
+    # Check if idctab is present on disk
+    # If kw IDCTAB is present in the header but the file is
+    # not found on disk, do not run TDDCorr, MakeWCS and CompSIP
+    if not foundIDCTAB(fname):
+        if 'TDDCorr' in acorr: acorr.remove('TDDCorr')
+        if 'MakeWCS' in acorr: acorr.remove('MakeWCS')
+        if 'CompSIP' in acorr: acorr.remove('CompSIP')
+
+    # Drop corrections the user turned off, or which the per-correction
+    # checks below determine do not apply to this observation.
+    if 'VACorr' in acorr and vacorr==False: acorr.remove('VACorr')
+    if 'TDDCorr' in acorr:
+        tddcorr = applyTDDCorr(fname, tddcorr)
+        if tddcorr == False: acorr.remove('TDDCorr')
+
+    if 'NPOLCorr' in acorr:
+        npolcorr = applyNpolCorr(fname, npolcorr)
+        if npolcorr == False: acorr.remove('NPOLCorr')
+    if 'DET2IMCorr' in acorr:
+        d2imcorr = applyD2ImCorr(fname, d2imcorr)
+        if d2imcorr == False: acorr.remove('DET2IMCorr')
+    logger.info("\n\tCorrections to be applied to %s: %s " % (fname, str(acorr)))
+    return acorr
+
+def foundIDCTAB(fname):
+ try:
+ idctab = fileutil.osfn(pyfits.getval(fname, 'IDCTAB'))
+ except KeyError:
+ return False
+ if idctab == 'N/A' or idctab == "":
+ return False
+ if os.path.exists(idctab):
+ return True
+ else:
+ return False
+
+def applyTDDCorr(fname, utddcorr):
+ """
+ The default value of tddcorr for all ACS images is True.
+ This correction will be performed if all conditions below are True:
+ - the user did not turn it off on the command line
+ - the detector is WFC
+ - the idc table specified in the primary header is available.
+ """
+ instrument = pyfits.getval(fname, 'INSTRUME')
+ try:
+ detector = pyfits.getval(fname, 'DETECTOR')
+ except KeyError:
+ detector = None
+ try:
+ tddswitch = pyfits.getval(fname, 'TDDCORR')
+ except KeyError:
+ tddswitch = 'PERFORM'
+
+ if instrument == 'ACS' and detector == 'WFC' and utddcorr == True and tddswitch == 'PERFORM':
+ tddcorr = True
+ try:
+ idctab = pyfits.getval(fname, 'IDCTAB')
+ except KeyError:
+ tddcorr = False
+ #print "***IDCTAB keyword not found - not applying TDD correction***\n"
+ if os.path.exists(fileutil.osfn(idctab)):
+ tddcorr = True
+ else:
+ tddcorr = False
+ #print "***IDCTAB file not found - not applying TDD correction***\n"
+ else:
+ tddcorr = False
+
+ return tddcorr
+
+def applyNpolCorr(fname, unpolcorr):
+    """
+    Determines whether non-polynomial distortion lookup tables should be added
+    as extensions to the science file based on the 'NPOLFILE' keyword in the
+    primary header and NPOLEXT kw in the first extension.
+    This is a default correction and will always run in the pipeline.
+    The file used to generate the extensions is
+    recorded in the NPOLEXT keyword in the first science extension.
+    If 'NPOLFILE' in the primary header is different from 'NPOLEXT' in the
+    extension header and the file exists on disk and is a 'new type' npolfile,
+    then the lookup tables will be updated as 'WCSDVARR' extensions.
+    """
+    applyNPOLCorr = True
+    try:
+        # get NPOLFILE kw from primary header
+        fnpol0 = pyfits.getval(fname, 'NPOLFILE')
+        if fnpol0 == 'N/A':
+            return False
+        fnpol0 = fileutil.osfn(fnpol0)
+        if not fileutil.findFile(fnpol0):
+            msg = """\n\tKw "NPOLFILE" exists in primary header but file %s not found
+                      Non-polynomial distortion correction will not be applied\n
+            """ % fnpol0
+            logger.critical(msg)
+            applyNPOLCorr = False
+            return applyNPOLCorr
+        try:
+            # get NPOLEXT kw from first extension header
+            fnpol1 = pyfits.getval(fname, 'NPOLEXT', ext=1)
+            fnpol1 = fileutil.osfn(fnpol1)
+            if fnpol1 and fileutil.findFile(fnpol1):
+                if fnpol0 != fnpol1:
+                    # A different NPOLFILE than the one already applied:
+                    # the WCSDVARR extensions need to be refreshed.
+                    applyNPOLCorr = True
+                else:
+                    msg = """\n\tNPOLEXT with the same value as NPOLFILE found in first extension.
+                    NPOL correction will not be applied."""
+                    logger.info(msg)
+                    applyNPOLCorr = False
+            else:
+                # npl file defined in first extension may not be found
+                # but if a valid kw exists in the primary header, non-polynomial
+                #distortion correction should be applied.
+                applyNPOLCorr = True
+        except KeyError:
+            # the case of "NPOLFILE" kw present in primary header but "NPOLEXT" missing
+            # in first extension header
+            applyNPOLCorr = True
+    except KeyError:
+        # Outer handler: "NPOLFILE" itself is absent from the primary header.
+        logger.info('\n\t"NPOLFILE" keyword not found in primary header')
+        applyNPOLCorr = False
+        return applyNPOLCorr
+
+    # Old style (full size) DGEO files cannot be used as lookup tables.
+    if isOldStyleDGEO(fname, fnpol0):
+            applyNPOLCorr = False
+    # Honor the user-supplied switch as well.
+    return (applyNPOLCorr and unpolcorr)
+
+def isOldStyleDGEO(fname, dgname):
+ # checks if the file defined in a NPOLFILE kw is a full size
+ # (old style) image
+
+ sci_naxis1 = pyfits.getval(fname, 'NAXIS1', ext=1)
+ sci_naxis2 = pyfits.getval(fname, 'NAXIS2', ext=1)
+ dg_naxis1 = pyfits.getval(dgname, 'NAXIS1', ext=1)
+ dg_naxis2 = pyfits.getval(dgname, 'NAXIS2', ext=1)
+ if sci_naxis1 <= dg_naxis1 or sci_naxis2 <= dg_naxis2:
+ msg = """\n\tOnly full size (old style) DGEO file was found.\n
+ Non-polynomial distortion correction will not be applied."""
+ logger.critical(msg)
+ return True
+ else:
+ return False
+
+def applyD2ImCorr(fname, d2imcorr):
+    """
+    Determines whether the detector to image correction can be applied,
+    based on the 'D2IMFILE' keyword in the primary header and the
+    'D2IMEXT' keyword in the first extension (same logic as applyNpolCorr).
+
+    NOTE(review): unlike applyNpolCorr, the user-supplied `d2imcorr`
+    flag is never consulted in the return value - confirm whether this
+    is intentional.
+    """
+    applyD2IMCorr = True
+    try:
+        # get D2IMFILE kw from primary header
+        fd2im0 = pyfits.getval(fname, 'D2IMFILE')
+        if fd2im0 == 'N/A':
+            return False
+        fd2im0 = fileutil.osfn(fd2im0)
+        if not fileutil.findFile(fd2im0):
+            msg = """\n\tKw D2IMFILE exists in primary header but file %s not found\n
+                     Detector to image correction will not be applied\n""" % fd2im0
+            logger.critical(msg)
+            print msg
+            applyD2IMCorr = False
+            return applyD2IMCorr
+        try:
+            # get D2IMEXT kw from first extension header
+            fd2imext = pyfits.getval(fname, 'D2IMEXT', ext=1)
+            fd2imext = fileutil.osfn(fd2imext)
+            if fd2imext and fileutil.findFile(fd2imext):
+                if fd2im0 != fd2imext:
+                    # A different D2IMFILE than the one already applied:
+                    # the D2IMARR extension needs to be refreshed.
+                    applyD2IMCorr = True
+                else:
+                    # Same file already applied - nothing to do.
+                    applyD2IMCorr = False
+            else:
+                # D2IM file defined in first extension may not be found
+                # but if a valid kw exists in the primary header,
+                # detector to image correction should be applied.
+                applyD2IMCorr = True
+        except KeyError:
+            # the case of D2IMFILE kw present in primary header but D2IMEXT missing
+            # in first extension header
+            applyD2IMCorr = True
+    except KeyError:
+        # Outer handler: 'D2IMFILE' itself is absent from the primary header.
+        print 'D2IMFILE keyword not found in primary header'
+        applyD2IMCorr = False
+    return applyD2IMCorr
+
diff --git a/lib/stwcs/updatewcs/corrections.py b/lib/stwcs/updatewcs/corrections.py
new file mode 100644
index 0000000..b227d05
--- /dev/null
+++ b/lib/stwcs/updatewcs/corrections.py
@@ -0,0 +1,230 @@
+from __future__ import division # confidence high
+
+import datetime
+import numpy as np
+from numpy import linalg
+from stsci.tools import fileutil
+from utils import diff_angles
+import makewcs, npol
+
+import logging, time
+logger=logging.getLogger('stwcs.updatewcs.corrections')
+
+MakeWCS = makewcs.MakeWCS
+NPOLCorr = npol.NPOLCorr
+
+class TDDCorr(object):
+    """
+    Apply time dependent distortion correction to distortion coefficients and basic
+    WCS keywords. This correction **must** be done before any other WCS correction.
+
+    Parameters
+    ----------
+    ext_wcs: HSTWCS object
+        An HSTWCS object to be modified
+    ref_wcs: HSTWCS object
+        A reference HSTWCS object
+
+    Notes
+    -----
+    Compute the ACS/WFC time dependent distortion terms as described
+    in [1]_ and apply the correction to the WCS of the observation.
+
+    The model coefficients are stored in the primary header of the IDCTAB.
+    :math:`D_{ref}` is the reference date. The computed corrections are saved
+    in the science extension header as TDDALPHA and TDDBETA keywords.
+
+    .. math:: TDDALPHA = A_{0} + {A_{1}*(obsdate - D_{ref})}
+
+    .. math:: TDDBETA = B_{0} + B_{1}*(obsdate - D_{ref})
+
+
+    The time dependent distortion affects the IDCTAB coefficients, and the
+    relative location of the two chips. Because the linear order IDCTAB
+    coefficients are used in the computation of the NPOL extensions,
+    the TDD correction affects all components of the distortion model.
+
+    Application of TDD to the IDCTAB polynomial coefficients:
+    The TDD model is computed in Jay's frame, while the IDCTAB coefficients
+    are in the HST V2/V3 frame. The coefficients are transformed to Jay's frame,
+    TDD is applied and they are transformed back to the V2/V3 frame. This
+    correction is performed in this class.
+
+    Application of TDD to the relative location of the two chips is
+    done in `makewcs`.
+
+    References
+    ----------
+    .. [1] Jay Anderson, "Variation of the Distortion Solution of the WFC", ACS ISR 2007-08.
+
+    """
+    def updateWCS(cls, ext_wcs, ref_wcs):
+        """
+        - Calculates alpha and beta for ACS/WFC data.
+        - Writes 2 new kw to the extension header: TDDALPHA and TDDBETA
+        """
+        logger.info("\n\tStarting TDDCorr: %s" % time.asctime())
+        alpha, beta = cls.compute_alpha_beta(ext_wcs)
+        # Apply the skew to both the extension and the reference WCS so
+        # later corrections see consistent IDC coefficients.
+        cls.apply_tdd2idc(ref_wcs, alpha, beta)
+        cls.apply_tdd2idc(ext_wcs, alpha, beta)
+        ext_wcs.idcmodel.refpix['TDDALPHA'] = alpha
+        ext_wcs.idcmodel.refpix['TDDBETA'] = beta
+        ref_wcs.idcmodel.refpix['TDDALPHA'] = alpha
+        ref_wcs.idcmodel.refpix['TDDBETA'] = beta
+
+        # Record the skew terms and the corrected linear coefficients.
+        newkw = {'TDDALPHA': alpha, 'TDDBETA':beta, 'OCX10':ext_wcs.idcmodel.cx[1,0],
+                 'OCX11':ext_wcs.idcmodel.cx[1,1],'OCY10':ext_wcs.idcmodel.cy[1,0],
+                 'OCY11':ext_wcs.idcmodel.cy[1,1],}
+
+        return newkw
+    updateWCS = classmethod(updateWCS)
+
+    def apply_tdd2idc(cls, hwcs, alpha, beta):
+        """
+        Applies TDD to the idctab coefficients of a ACS/WFC observation.
+        This should be always the first correction.
+        """
+        # Rotation angle (degrees) between the V2/V3 frame and the frame
+        # in which the TDD model is defined (see class notes).
+        theta_v2v3 = 2.234529
+        mrotp = fileutil.buildRotMatrix(theta_v2v3)
+        mrotn = fileutil.buildRotMatrix(-theta_v2v3)
+        # Skew matrix built from alpha/beta.
+        # NOTE(review): 2048 is presumably a normalization over the WFC
+        # aperture scale - confirm against ACS ISR 2007-08.
+        tdd_mat = np.array([[1+(beta/2048.), alpha/2048.],[alpha/2048.,1-(beta/2048.)]],np.float64)
+        # Rotate into the TDD frame, apply the skew, rotate back.
+        abmat1 = np.dot(tdd_mat, mrotn)
+        abmat2 = np.dot(mrotp,abmat1)
+        xshape, yshape = hwcs.idcmodel.cx.shape, hwcs.idcmodel.cy.shape
+        icxy = np.dot(abmat2,[hwcs.idcmodel.cx.ravel(), hwcs.idcmodel.cy.ravel()])
+        hwcs.idcmodel.cx = icxy[0]
+        hwcs.idcmodel.cy = icxy[1]
+        hwcs.idcmodel.cx.shape = xshape
+        hwcs.idcmodel.cy.shape = yshape
+
+    apply_tdd2idc = classmethod(apply_tdd2idc)
+
+    def compute_alpha_beta(cls, ext_wcs):
+        """
+        Compute the ACS time dependent distortion skew terms
+        as described in ACS ISR 07-08 by J. Anderson.
+
+        Jay's code only computes the alpha/beta values based on a decimal year
+        with only 3 digits, so this line reproduces that when needed for comparison
+        with his results.
+        rday = float(('%0.3f')%rday)
+
+        The zero-point terms account for the skew accumulated between
+        2002.0 and 2004.5, when the latest IDCTAB was delivered.
+        alpha = 0.095 + 0.090*(rday-dday)/2.5
+        beta = -0.029 - 0.030*(rday-dday)/2.5
+        """
+        # Convert DATE-OBS ('YYYY-MM-DD') to a decimal year if necessary.
+        if not isinstance(ext_wcs.date_obs,float):
+            year,month,day = ext_wcs.date_obs.split('-')
+            rdate = datetime.datetime(int(year),int(month),int(day))
+            rday = float(rdate.strftime("%j"))/365.25 + rdate.year
+        else:
+            rday = ext_wcs.date_obs
+
+        skew_coeffs = ext_wcs.idcmodel.refpix['skew_coeffs']
+        if skew_coeffs is None:
+            # Only print out warning for post-SM4 data where this may matter
+            if rday > 2009.0:
+                err_str =  "------------------------------------------------------------------------  \n"
+                err_str += "WARNING: the IDCTAB geometric distortion file specified in the image      \n"
+                err_str += "         header did not have the time-dependent distortion coefficients.  \n"
+                err_str += "         The pre-SM4 time-dependent skew solution will be used by default.\n"
+                err_str += "         Please update IDCTAB with new reference file from HST archive.   \n"
+                err_str += "------------------------------------------------------------------------  \n"
+                print err_str
+            # Using default pre-SM4 coefficients
+            skew_coeffs = {'TDD_A':[0.095,0.090/2.5],
+                           'TDD_B':[-0.029,-0.030/2.5],
+                           'TDD_DATE':2004.5,'TDDORDER':1}
+
+        alpha = 0
+        beta = 0
+        # Compute skew terms, allowing for non-linear coefficients as well
+        for c in range(skew_coeffs['TDDORDER']+1):
+            alpha += skew_coeffs['TDD_A'][c]* np.power((rday-skew_coeffs['TDD_DATE']),c)
+            beta += skew_coeffs['TDD_B'][c]*np.power((rday-skew_coeffs['TDD_DATE']),c)
+
+        return alpha,beta
+    compute_alpha_beta = classmethod(compute_alpha_beta)
+
+
+class VACorr(object):
+ """
+ Apply velocity aberation correction to WCS keywords.
+
+ Notes
+ -----
+ Velocity Aberration is stored in the extension header keyword 'VAFACTOR'.
+ The correction is applied to the CD matrix and CRVALs.
+
+ """
+ def updateWCS(cls, ext_wcs, ref_wcs):
+ logger.info("\n\tStarting VACorr: %s" % time.asctime())
+ if ext_wcs.vafactor != 1:
+ ext_wcs.wcs.cd = ext_wcs.wcs.cd * ext_wcs.vafactor
+ crval0 = ref_wcs.wcs.crval[0] + ext_wcs.vafactor*diff_angles(ext_wcs.wcs.crval[0],
+ ref_wcs.wcs.crval[0])
+ crval1 = ref_wcs.wcs.crval[1] + ext_wcs.vafactor*diff_angles(ext_wcs.wcs.crval[1],
+ ref_wcs.wcs.crval[1])
+ crval = np.array([crval0,crval1])
+ ext_wcs.wcs.crval = crval
+ ext_wcs.wcs.set()
+ else:
+ pass
+ kw2update={'CD1_1': ext_wcs.wcs.cd[0,0], 'CD1_2':ext_wcs.wcs.cd[0,1],
+ 'CD2_1':ext_wcs.wcs.cd[1,0], 'CD2_2':ext_wcs.wcs.cd[1,1],
+ 'CRVAL1':ext_wcs.wcs.crval[0], 'CRVAL2':ext_wcs.wcs.crval[1]}
+ return kw2update
+
+ updateWCS = classmethod(updateWCS)
+
+
+class CompSIP(object):
+    """
+    Compute Simple Imaging Polynomial (SIP) coefficients as defined in [2]_
+    from IDC table coefficients.
+
+    This class transforms the TDD corrected IDCTAB coefficients into SIP format.
+    It also applies a binning factor to the coefficients if the observation was
+    binned.
+
+    References
+    ----------
+    .. [2] David Shupe, et al, "The SIP Convention of representing Distortion
+       in FITS Image headers", Astronomical Data Analysis Software And Systems, ASP
+       Conference Series, Vol. 347, 2005
+
+    """
+    def updateWCS(cls, ext_wcs, ref_wcs):
+        """
+        Return a dict of SIP keywords (A_ORDER/B_ORDER, A_p_q/B_p_q,
+        CTYPE1/CTYPE2) to be written to the extension header.
+        `ref_wcs` is accepted for interface uniformity with the other
+        corrections but is not used here.
+        """
+        logger.info("\n\tStarting CompSIP: %s" %time.asctime())
+        kw2update = {}
+        order = ext_wcs.idcmodel.norder
+        kw2update['A_ORDER'] = order
+        kw2update['B_ORDER'] = order
+        # NOTE(review): pscale is computed but never used below.
+        pscale = ext_wcs.idcmodel.refpix['PSCALE']
+
+        cx = ext_wcs.idcmodel.cx
+        cy = ext_wcs.idcmodel.cy
+
+        # Invert the linear (first order) part of the IDC model; the SIP
+        # coefficients are the higher order terms expressed relative to
+        # that linear transformation.
+        matr = np.array([[cx[1,1],cx[1,0]], [cy[1,1],cy[1,0]]], dtype=np.float64)
+        imatr = linalg.inv(matr)
+        # NOTE(review): akeys1/bkeys1 collect the same values as kw2update
+        # but are never returned or read afterwards.
+        akeys1 = np.zeros((order+1,order+1), dtype=np.float64)
+        bkeys1 = np.zeros((order+1,order+1), dtype=np.float64)
+        for n in range(order+1):
+            for m in range(order+1):
+                # SIP terms exist only for total order >= 2 with n >= m.
+                if n >= m and n>=2:
+                    idcval = np.array([[cx[n,m]],[cy[n,m]]])
+                    sipval = np.dot(imatr, idcval)
+                    akeys1[m,n-m] = sipval[0]
+                    bkeys1[m,n-m] = sipval[1]
+                    Akey="A_%d_%d" % (m,n-m)
+                    Bkey="B_%d_%d" % (m,n-m)
+                    kw2update[Akey] = sipval[0,0] * ext_wcs.binned
+                    kw2update[Bkey] = sipval[1,0] * ext_wcs.binned
+        kw2update['CTYPE1'] = 'RA---TAN-SIP'
+        kw2update['CTYPE2'] = 'DEC--TAN-SIP'
+        return kw2update
+
+    updateWCS = classmethod(updateWCS)
+
+
diff --git a/lib/stwcs/updatewcs/det2im.py b/lib/stwcs/updatewcs/det2im.py
new file mode 100644
index 0000000..fe683b4
--- /dev/null
+++ b/lib/stwcs/updatewcs/det2im.py
@@ -0,0 +1,200 @@
+from __future__ import division # confidence high
+
+import time
+import pyfits
+from stsci.tools import fileutil
+import utils
+
+import logging
+logger = logging.getLogger('stwcs.updatewcs.Det2IM')
+
class DET2IMCorr(object):
    """
    Stores a small correction to the detector coordinates as a d2imarr
    extension in the science file.

    Notes
    -----
    For the case of ACS/WFC every 68th column is wider than the rest.
    To compensate for this a small correction needs to be applied to the
    detector coordinates. We call this a detector to image transformation.
    The so obtained image coordinates are the input to all other distortion
    corrections. The correction is originally stored in an external
    reference file pointed to by 'D2IMFILE' keyword in the primary header.
    This class attaches the correction array as an extension to the science
    file with extname = `d2imarr`.

    Other keywords used in this correction are:

    `AXISCORR`: integer (1 or 2) - axis to which the detector to image
                correction is applied

    `D2IMEXT`:  string = name of reference file which was used to create
                the lookup table extension

    `D2IMERR`:  float, optional - maximum value of the correction

    """
    @classmethod
    def updateWCS(cls, fobj):
        """
        Attach (or refresh) the D2IMARR extension and record the bookkeeping
        keywords in the SCI extension headers.

        Parameters
        ----------
        fobj: pyfits.HDUList
            Science file, for which a detector to image correction
            is available.

        Raises
        ------
        AssertionError
            If ``fobj`` is not a pyfits.HDUList.

        Notes
        -----
        Uses the file pointed to in the primary header keyword 'D2IMFILE'
        to create an extension with a detector to image correction.
        """
        logger.info("\n\tStarting Det2IM Correction: %s" % time.asctime())
        try:
            assert isinstance(fobj, pyfits.HDUList)
        except AssertionError:
            logger.exception('\n\tInput must be a pyfits.HDUList object')
            raise

        d2imfile = fileutil.osfn(fobj[0].header['D2IMFILE'])
        axiscorr = cls.getAxisCorr(d2imfile)
        # Identity check, not '== None': axiscorr is an int or None.
        if axiscorr is None:
            new_kw = {}
        else:
            # Only read the correction array when the result is recorded;
            # previously it was read from disk even when discarded.
            d2imerr = pyfits.getdata(d2imfile, ext=1).max()
            new_kw = {'D2IMEXT': d2imfile, 'AXISCORR': axiscorr, 'D2IMERR': d2imerr}
        cls.applyDet2ImCorr(fobj, axiscorr)
        cls.updatehdr(fobj, new_kw)

    @classmethod
    def getAxisCorr(cls, refname):
        """
        Return the axis to correct: 1 for a 'DX' reference extension,
        2 for 'DY', or None when it cannot be determined.
        """
        try:
            direction = pyfits.getval(refname, ext=1, key='EXTNAME')
            if direction == 'DX':
                return 1
            elif direction == 'DY':
                return 2
            else:
                # Fixed typo in original message ("EXTNAMe").
                logger.warning('\n\tDET2IM correction expects the reference file to have '
                               'an EXTNAME keyword of value "DX" or "DY", '
                               'EXTNAME %s detected' % direction)
                return None
        except KeyError:
            logger.exception("\n\tD2IMFILE %s is missing EXTNAME keyword. "
                             "Unable to determine axis to which to apply the correction." % refname)
            return None

    @classmethod
    def applyDet2ImCorr(cls, fobj, axiscorr):
        """Create the D2IMARR HDU and insert (or append) it into ``fobj``."""
        binned = utils.getBinning(fobj)
        hdu = cls.createDgeoHDU(fobj, axiscorr, binned)
        d2imarr_ind = cls.getD2imIndex(fobj)
        if d2imarr_ind:
            # Replace the existing D2IMARR extension in place.
            fobj[d2imarr_ind] = hdu
        else:
            fobj.append(hdu)

    @classmethod
    def getD2imIndex(cls, fobj):
        """Return the index of the (last) D2IMARR extension, or None."""
        index = None
        for e in range(len(fobj)):
            try:
                ename = fobj[e].header['EXTNAME']
            except KeyError:
                continue
            if ename == 'D2IMARR':
                index = e
        return index

    @classmethod
    def createDgeoHDU(cls, fobj, axiscorr, binned=1):
        """
        Build the D2IMARR ImageHDU from the data in the D2IMFILE
        reference file. ``axiscorr`` is unused here (the header is
        recomputed from the reference file) but kept for interface
        compatibility.
        """
        d2imfile = fileutil.osfn(fobj[0].header['D2IMFILE'])
        d2im_data = pyfits.getdata(d2imfile, ext=1)
        d2im_hdr = cls.createDet2ImHdr(fobj, binned)
        hdu = pyfits.ImageHDU(header=d2im_hdr, data=d2im_data)
        return hdu

    @classmethod
    def createDet2ImHdr(cls, fobj, binned=1):
        """
        Creates a header for the D2IMARR extension based on the
        reference file recorded in D2IMFILE keyword in the primary header.
        fobj - the science file

        """
        d2imfile = fileutil.osfn(fobj[0].header['D2IMFILE'])
        axiscorr = cls.getAxisCorr(d2imfile)
        sci_hdr = fobj[1].header
        data_shape = pyfits.getdata(d2imfile, ext=1).shape
        naxis = pyfits.getval(d2imfile, ext=1, key='NAXIS')

        # Comments for the per-axis WCS keywords of the distortion array.
        kw = {'NAXIS': 'Size of the axis',
              'CRPIX': 'Coordinate system reference pixel',
              'CRVAL': 'Coordinate system value at reference pixel',
              'CDELT': 'Coordinate increment along axis'}

        kw_comm1 = {}
        kw_val1 = {}
        for key in kw.keys():
            for i in range(1, naxis + 1):
                si = str(i)
                kw_comm1[key + si] = kw[key]

        for i in range(1, naxis + 1):
            si = str(i)
            kw_val1['NAXIS' + si] = data_shape[i - 1]
            # Reference pixel at the center of the correction array.
            kw_val1['CRPIX' + si] = data_shape[i - 1] / 2.
            kw_val1['CDELT' + si] = 1. / binned
            # Map the array center onto the science frame, accounting for
            # any subarray offset (LTV*) and binning.
            kw_val1['CRVAL' + si] = (sci_hdr.get('NAXIS' + si, 1) / 2. +
                                     sci_hdr.get('LTV' + si, 0.)) / binned

        kw_comm0 = {'XTENSION': 'Image extension',
                    'BITPIX': 'IEEE floating point',
                    'NAXIS': 'Number of axes',
                    'EXTNAME': 'WCS distortion array',
                    'EXTVER': 'Distortion array version number',
                    'PCOUNT': 'Special data area of size 0',
                    'GCOUNT': 'One data group',
                    'AXISCORR': 'Direction in which the det2im correction is applied'}

        kw_val0 = {'XTENSION': 'IMAGE',
                   'BITPIX': -32,
                   'NAXIS': naxis,
                   'EXTNAME': 'D2IMARR',
                   'EXTVER': 1,
                   'PCOUNT': 0,
                   'GCOUNT': 1,
                   'AXISCORR': axiscorr
                   }

        cdl = pyfits.CardList()
        for key in kw_comm0.keys():
            cdl.append(pyfits.Card(key=key, value=kw_val0[key], comment=kw_comm0[key]))
        for key in kw_comm1.keys():
            cdl.append(pyfits.Card(key=key, value=kw_val1[key], comment=kw_comm1[key]))

        hdr = pyfits.Header(cards=cdl)
        return hdr

    @classmethod
    def updatehdr(cls, fobj, kwdict):
        """
        Update extension headers to keep record of the files used for the
        detector to image correction.
        """
        for ext in fobj:
            try:
                extname = ext.header['EXTNAME'].lower()
            except KeyError:
                continue
            if extname == 'sci':
                for kw in kwdict:
                    ext.header.update(kw, kwdict[kw])
+
diff --git a/lib/stwcs/updatewcs/makewcs.py b/lib/stwcs/updatewcs/makewcs.py
new file mode 100644
index 0000000..bb86be9
--- /dev/null
+++ b/lib/stwcs/updatewcs/makewcs.py
@@ -0,0 +1,251 @@
+from __future__ import division # confidence high
+
+from stwcs import DEGTORAD, RADTODEG
+import numpy as np
+from math import sin, sqrt, pow, cos, asin, atan2,pi
+import utils
+from stsci.tools import fileutil
+
+import logging, time
+logger = logging.getLogger(__name__)
+
class MakeWCS(object):
    """
    Recompute basic WCS keywords based on PA_V3 and distortion model.

    Notes
    -----
    - Compute the reference chip WCS:

        -- CRVAL: transform the model XREF/YREF to the sky
        -- PA_V3 is calculated at the target position and adjusted
           for each chip orientation
        -- CD: PA_V3 and the model scale are used to construct a CD matrix

    - Compute the second chip WCS:

        -- CRVAL: - the distance between the zero points of the two
                  chip models on the sky
        -- CD matrix: first order coefficients are added to the components
            of this distance and transferred on the sky. The difference
            between CRVAL and these vectors is the new CD matrix for each chip.
        -- CRPIX: chip's model zero point in pixel space (XREF/YREF)

    - Time dependent distortion correction is applied to both chips' V2/V3 values.

    """
    # Default TDD reference pixel position (x, y) for each ACS/WFC chip.
    tdd_xyref = {1: [2048, 3072], 2:[2048, 1024]}
    def updateWCS(cls, ext_wcs, ref_wcs):
        """
        recomputes the basic WCS kw

        Parameters
        ----------
        ext_wcs : HSTWCS
            WCS of the science extension being updated; modified in place.
        ref_wcs : HSTWCS
            WCS of the reference chip; also modified in place.

        Returns
        -------
        dict
            Header keywords to write back: CD matrix elements, CRVAL1/2,
            CRPIX1/2 and the IDCTAB name.
        """
        logger.info("\n\tStarting MakeWCS: %s" % time.asctime())
        # Offsets accounting for subarray observations (non-zero LTV kw).
        ltvoff, offshift = cls.getOffsets(ext_wcs)

        # Time dependent distortion (TDD) zero point corrections in V2/V3
        # for both the extension chip and the reference chip.
        v23_corr = cls.zero_point_corr(ext_wcs)
        rv23_corr = cls.zero_point_corr(ref_wcs)

        # The reference chip WCS must be updated first; the extension WCS
        # is then derived relative to it.
        cls.uprefwcs(ext_wcs, ref_wcs, rv23_corr, ltvoff, offshift)
        cls.upextwcs(ext_wcs, ref_wcs, v23_corr, rv23_corr, ltvoff, offshift)

        kw2update = {'CD1_1': ext_wcs.wcs.cd[0,0],
                    'CD1_2': ext_wcs.wcs.cd[0,1],
                    'CD2_1': ext_wcs.wcs.cd[1,0],
                    'CD2_2': ext_wcs.wcs.cd[1,1],
                    'CRVAL1': ext_wcs.wcs.crval[0],
                    'CRVAL2': ext_wcs.wcs.crval[1],
                    'CRPIX1': ext_wcs.wcs.crpix[0],
                    'CRPIX2': ext_wcs.wcs.crpix[1],
                    'IDCTAB': ext_wcs.idctab,
                    }
        return kw2update

    updateWCS = classmethod(updateWCS)

    def upextwcs(cls, ext_wcs, ref_wcs, v23_corr, rv23_corr, loff, offsh):
        """
        updates an extension wcs

        Computes CRVAL/CRPIX and the CD matrix of ``ext_wcs`` from its
        distance to the reference chip on the sky; modifies ``ext_wcs``
        in place and returns None.
        """
        ltvoffx, ltvoffy = loff[0], loff[1]
        offshiftx, offshifty = offsh[0], offsh[1]
        ltv1 = ext_wcs.ltv1
        ltv2 = ext_wcs.ltv2
        # For subarrays (non-zero LTV) shift the distortion polynomial
        # to the subarray reference frame.
        if ltv1 != 0. or ltv2 != 0.:
            offsetx = ext_wcs.wcs.crpix[0] - ltv1 - ext_wcs.idcmodel.refpix['XREF']
            offsety = ext_wcs.wcs.crpix[1] - ltv2 - ext_wcs.idcmodel.refpix['YREF']
            fx,fy = ext_wcs.idcmodel.shift(ext_wcs.idcmodel.cx,ext_wcs.idcmodel.cy,offsetx,offsety)
        else:
            fx, fy = ext_wcs.idcmodel.cx, ext_wcs.idcmodel.cy

        # Apply the TDD zero point correction to the V2/V3 reference
        # positions of both chips, scaled to this chip's linear term.
        tddscale = (ref_wcs.pscale/fx[1,1])
        v2 = ext_wcs.idcmodel.refpix['V2REF'] + v23_corr[0,0] * tddscale
        v3 = ext_wcs.idcmodel.refpix['V3REF'] - v23_corr[1,0] * tddscale
        v2ref = ref_wcs.idcmodel.refpix['V2REF'] + rv23_corr[0,0] * tddscale
        v3ref = ref_wcs.idcmodel.refpix['V3REF'] - rv23_corr[1,0] * tddscale

        # Separation of the two chips' reference points, expressed in
        # reference chip pixels (V2/V3 are in arcsec, R_scale in deg/pix).
        R_scale = ref_wcs.idcmodel.refpix['PSCALE']/3600.0
        off = sqrt((v2-v2ref)**2 + (v3-v3ref)**2)/(R_scale*3600.0)

        # Position angle of this chip's zero point relative to the
        # reference chip; parity flips axes as needed.
        if v3 == v3ref:
            theta=0.0
        else:
            theta = atan2(ext_wcs.parity[0][0]*(v2-v2ref), ext_wcs.parity[1][1]*(v3-v3ref))

        if ref_wcs.idcmodel.refpix['THETA']: theta += ref_wcs.idcmodel.refpix['THETA']*pi/180.0

        dX=(off*sin(theta)) + offshiftx
        dY=(off*cos(theta)) + offshifty

        # Project the chip zero point through the reference WCS to get
        # this chip's CRVAL; CRPIX is the model reference pixel.
        px = np.array([[dX,dY]])
        newcrval = ref_wcs.wcs.p2s(px, 1)['world'][0]
        newcrpix = np.array([ext_wcs.idcmodel.refpix['XREF'] + ltvoffx,
                            ext_wcs.idcmodel.refpix['YREF'] + ltvoffy])
        ext_wcs.wcs.crval = newcrval
        ext_wcs.wcs.crpix = newcrpix
        ext_wcs.wcs.set()

        # Create a small vector, in reference image pixel scale
        delmat = np.array([[fx[1,1], fy[1,1]], \
                          [fx[1,0], fy[1,0]]]) / R_scale/3600.

        # Account for subarray offset
        # Angle of chip relative to chip
        if ext_wcs.idcmodel.refpix['THETA']:
            dtheta = ext_wcs.idcmodel.refpix['THETA'] - ref_wcs.idcmodel.refpix['THETA']
        else:
            dtheta = 0.0

        rrmat = fileutil.buildRotMatrix(dtheta)
        # Rotate the vectors
        dxy = np.dot(delmat, rrmat)
        wc = ref_wcs.wcs.p2s((px + dxy), 1)['world']

        # Calculate the new CDs and convert to degrees
        cd11 = utils.diff_angles(wc[0,0],newcrval[0])*cos(newcrval[1]*pi/180.0)
        cd12 = utils.diff_angles(wc[1,0],newcrval[0])*cos(newcrval[1]*pi/180.0)
        cd21 = utils.diff_angles(wc[0,1],newcrval[1])
        cd22 = utils.diff_angles(wc[1,1],newcrval[1])
        cd = np.array([[cd11, cd12], [cd21, cd22]])
        ext_wcs.wcs.cd = cd
        ext_wcs.wcs.set()

    upextwcs = classmethod(upextwcs)

    def uprefwcs(cls, ext_wcs, ref_wcs, rv23_corr_tdd, loff, offsh):
        """
        Updates the reference chip

        Sets CRVAL, CRPIX and the CD matrix of ``ref_wcs`` in place,
        orienting it using PA_V3 at the TDD-corrected V2/V3 reference
        position; returns None.
        """
        ltvoffx, ltvoffy = loff[0], loff[1]
        offshift = offsh
        dec = ref_wcs.wcs.crval[1]
        # TDD-corrected V2/V3 reference position of the reference chip.
        tddscale = (ref_wcs.pscale/ext_wcs.idcmodel.cx[1,1])
        rv23 = [ref_wcs.idcmodel.refpix['V2REF'] + (rv23_corr_tdd[0,0] *tddscale),
                ref_wcs.idcmodel.refpix['V3REF'] - (rv23_corr_tdd[1,0] * tddscale)]
        # Get an approximate reference position on the sky
        rref = np.array([[ref_wcs.idcmodel.refpix['XREF']+ltvoffx ,
                        ref_wcs.idcmodel.refpix['YREF']+ltvoffy]])

        crval = ref_wcs.wcs.p2s(rref, 1)['world'][0]
        # Convert the PA_V3 orientation to the orientation at the aperture
        # This is for the reference chip only - we use this for the
        # reference tangent plane definition
        # It has the same orientation as the reference chip
        pv = troll(ext_wcs.pav3,dec,rv23[0], rv23[1])
        # Add the chip rotation angle
        if ref_wcs.idcmodel.refpix['THETA']:
            pv += ref_wcs.idcmodel.refpix['THETA']


        # Set values for the rest of the reference WCS
        ref_wcs.wcs.crval = crval
        ref_wcs.wcs.crpix = np.array([0.0,0.0])+offsh
        parity = ref_wcs.parity
        # Build a rotation CD matrix from the aperture roll angle and the
        # model plate scale; parity sets the axis handedness.
        R_scale = ref_wcs.idcmodel.refpix['PSCALE']/3600.0
        cd11 = parity[0][0] * cos(pv*pi/180.0)*R_scale
        cd12 = parity[0][0] * -sin(pv*pi/180.0)*R_scale
        cd21 = parity[1][1] * sin(pv*pi/180.0)*R_scale
        cd22 = parity[1][1] * cos(pv*pi/180.0)*R_scale

        rcd = np.array([[cd11, cd12], [cd21, cd22]])
        ref_wcs.wcs.cd = rcd
        ref_wcs.wcs.set()

    uprefwcs = classmethod(uprefwcs)

    def zero_point_corr(cls,hwcs):
        """
        Return the time dependent distortion (TDD) zero point correction
        as a 2x1 array [[dV2], [dV3]]; zero when the model has no
        TDDALPHA/TDDBETA coefficients.
        """
        try:
            alpha = hwcs.idcmodel.refpix['TDDALPHA']
            beta = hwcs.idcmodel.refpix['TDDBETA']
        except KeyError:
            alpha = 0.0
            beta = 0.0
            v23_corr = np.array([[0.],[0.]])
            logger.debug("\n\tTDD Zero point correction for chip %s defaulted to: %s" % (hwcs.chip, v23_corr))
            return v23_corr

        # NOTE(review): the rotation angle 2.234529 deg, the 2048-pixel
        # normalization and the 0.05 scale factor appear to be ACS/WFC
        # TDD calibration constants - confirm against the TDD calibration
        # reference documentation before changing.
        tdd = np.array([[beta, alpha], [alpha, -beta]])
        mrotp = fileutil.buildRotMatrix(2.234529)/2048.
        xy0 = np.array([[cls.tdd_xyref[hwcs.chip][0]-2048.], [cls.tdd_xyref[hwcs.chip][1]-2048.]])
        v23_corr = np.dot(mrotp,np.dot(tdd,xy0)) * 0.05
        logger.debug("\n\tTDD Zero point correction for chip %s: %s" % (hwcs.chip, v23_corr))
        return v23_corr

    zero_point_corr = classmethod(zero_point_corr)

    def getOffsets(cls, ext_wcs):
        """
        Return (ltvoff, offshift) pixel offset arrays used to place a
        subarray observation in the full-chip frame; both are zero for
        full-frame observations (LTV1 == LTV2 == 0).
        """
        ltv1 = ext_wcs.ltv1
        ltv2 = ext_wcs.ltv2

        offsetx = ext_wcs.wcs.crpix[0] - ltv1 - ext_wcs.idcmodel.refpix['XREF']
        offsety = ext_wcs.wcs.crpix[1] - ltv2 - ext_wcs.idcmodel.refpix['YREF']

        shiftx = ext_wcs.idcmodel.refpix['XREF'] + ltv1
        shifty = ext_wcs.idcmodel.refpix['YREF'] + ltv2
        if ltv1 != 0. or ltv2 != 0.:
            ltvoffx = ltv1 + offsetx
            ltvoffy = ltv2 + offsety
            offshiftx = offsetx + shiftx
            offshifty = offsety + shifty
        else:
            ltvoffx = 0.
            ltvoffy = 0.
            offshiftx = 0.
            offshifty = 0.

        ltvoff = np.array([ltvoffx, ltvoffy])
        offshift = np.array([offshiftx, offshifty])
        return ltvoff, offshift

    getOffsets = classmethod(getOffsets)
+
+
def troll(roll, dec, v2, v3):
    """ Computes the roll angle at the target position based on:
            the roll angle at the V1 axis(roll),
            the dec of the target(dec), and
            the V2/V3 position of the aperture (v2,v3) in arcseconds.

        Based on algorithm provided by Colin Cox and used in
        Generic Conversion at STScI.
    """
    # Work in radians throughout; V2/V3 arrive in arcseconds.
    roll_r = DEGTORAD(roll)
    dec_r = DEGTORAD(dec)
    v2_r = DEGTORAD(v2 / 3600.)
    v3_r = DEGTORAD(v3 / 3600.)

    sin_v2 = sin(v2_r)
    sin_v3 = sin(v3_r)

    # Angular distance rho from the V1 axis to the aperture.
    sin_rho = sqrt((pow(sin_v2, 2) + pow(sin_v3, 2)) - (pow(sin_v2, 2) * pow(sin_v3, 2)))
    rho = asin(sin_rho)

    # Position angles of the aperture, placed in the correct quadrant.
    beta = asin(sin_v3 / sin_rho)
    if v2_r < 0:
        beta = pi - beta
    gamma = asin(sin_v2 / sin_rho)
    if v3_r < 0:
        gamma = pi - gamma

    # Solve the spherical triangle for the roll angle at the target.
    A = pi / 2. + roll_r - beta
    B = atan2(sin(A) * cos(dec_r), (sin(dec_r) * sin_rho - cos(dec_r) * cos(rho) * cos(A)))

    return RADTODEG(pi - (gamma + B))
+
diff --git a/lib/stwcs/updatewcs/npol.py b/lib/stwcs/updatewcs/npol.py
new file mode 100644
index 0000000..db928bf
--- /dev/null
+++ b/lib/stwcs/updatewcs/npol.py
@@ -0,0 +1,319 @@
+from __future__ import division # confidence high
+
+import pyfits
+from stsci.tools import fileutil
+import utils
+import numpy as np
+
+import logging, time
+logger = logging.getLogger('stwcs.updatewcs.npol')
+
class NPOLCorr(object):
    """
    Defines a Lookup table prior distortion correction as per WCS paper IV.
    It uses a reference file defined by the NPOLFILE (suffix 'NPL') keyword
    in the primary header.

    Notes
    -----
    - Using extensions in the reference file create a WCSDVARR extensions
      and add them to the science file.
    - Add record-valued keywords to the science extension header to describe
      the lookup tables.
    - Add a keyword 'NPOLEXT' to the science extension header to store
      the name of the reference file used to create the WCSDVARR extensions.

    If WCSDVARR extensions exist and `NPOLFILE` is different from `NPOLEXT`,
    a subsequent update will overwrite the existing extensions.
    If WCSDVARR extensions were not found in the science file, they will be added.

    It is assumed that the NPL reference files were created to work with IDC tables
    but will be applied with SIP coefficients. A transformation is applied to correct
    for the fact that the lookup tables will be applied before the first order coefficients
    which are in the CD matrix when the SIP convention is used.
    """

    @classmethod
    def updateWCS(cls, fobj):
        """
        Attach the WCSDVARR extensions and return bookkeeping keywords.

        Parameters
        ----------
        fobj: pyfits.HDUList
            Science file, for which a distortion correction in a NPOLFILE
            is available.

        Returns
        -------
        dict
            {'NPOLEXT': <reference file name>} to be recorded in the headers.
        """
        # The original message said "Starting CompSIP" - a copy/paste error.
        logger.info("\n\tStarting NPOL: %s" % time.asctime())
        try:
            assert isinstance(fobj, pyfits.HDUList)
        except AssertionError:
            logger.exception('\n\tInput must be a pyfits.HDUList object')
            raise

        cls.applyNPOLCorr(fobj)
        nplfile = fobj[0].header['NPOLFILE']

        new_kw = {'NPOLEXT': nplfile}
        return new_kw

    @classmethod
    def applyNPOLCorr(cls, fobj):
        """
        For each science extension in a pyfits file object:
            - create a WCSDVARR extension
            - update science header
            - add/update NPOLEXT keyword
        """
        nplfile = fileutil.osfn(fobj[0].header['NPOLFILE'])
        # Map WCSDVARR EXTVER numbers to extension numbers
        wcsdvarr_ind = cls.getWCSIndex(fobj)
        for ext in fobj:
            try:
                extname = ext.header['EXTNAME'].lower()
            except KeyError:
                continue
            if extname != 'sci':
                continue
            extversion = ext.header['EXTVER']
            ccdchip = cls.get_ccdchip(fobj, extname='SCI', extver=extversion)
            binned = utils.getBinning(fobj, extversion)
            header = ext.header
            # Get the data arrays from the reference file and transform
            # them for use with SIP.
            dx, dy = cls.getData(nplfile, ccdchip)
            idccoeffs = cls.getIDCCoeffs(header)

            # Identity check is required: idccoeffs is either None or a
            # numpy array, and ``array != None`` compares elementwise.
            if idccoeffs is not None:
                dx, dy = cls.transformData(dx, dy, idccoeffs)

            # Determine EXTVER for the WCSDVARR extension from the NPL file
            # (EXTNAME, EXTVER) kw. This is used to populate DPj.EXTVER kw.
            wcsdvarr_x_version = 2 * extversion - 1
            wcsdvarr_y_version = 2 * extversion

            for npl_extname, wdvarr_ver, data in zip(['DX', 'DY'],
                                                     [wcsdvarr_x_version, wcsdvarr_y_version],
                                                     [dx, dy]):
                cls.addSciExtKw(header, wdvarr_ver=wdvarr_ver, npol_extname=npl_extname)
                hdu = cls.createNpolHDU(header, npolfile=nplfile,
                                        wdvarr_ver=wdvarr_ver, npl_extname=npl_extname,
                                        data=data, ccdchip=ccdchip, binned=binned)
                if wcsdvarr_ind:
                    # Overwrite the existing WCSDVARR extension in place.
                    fobj[wcsdvarr_ind[wdvarr_ver]] = hdu
                else:
                    fobj.append(hdu)

    @classmethod
    def getWCSIndex(cls, fobj):
        """
        If fobj has WCSDVARR extensions:
            returns a mapping of their EXTVER kw to file object extension numbers
        if fobj does not have WCSDVARR extensions:
            an empty dictionary is returned
        """
        wcsd = {}
        for e in range(len(fobj)):
            try:
                ename = fobj[e].header['EXTNAME']
            except KeyError:
                continue
            if ename == 'WCSDVARR':
                wcsd[fobj[e].header['EXTVER']] = e
        logger.debug("A map of WCSDVARR extensions %s" % wcsd)
        return wcsd

    @classmethod
    def addSciExtKw(cls, hdr, wdvarr_ver=None, npol_extname=None):
        """
        Adds kw to sci extension to define WCSDVARR lookup table extensions.

        The keywords (CPDISj, CPERRORj and the record-valued DPj.* set)
        follow the lookup-table convention of WCS distortion paper IV.
        """
        # Axis 1 keywords come from the 'DX' table, axis 2 from 'DY'.
        if npol_extname == 'DX':
            j = 1
        else:
            j = 2

        cperror = 'CPERROR%s' % j
        cpdis = 'CPDIS%s' % j
        dpext = 'DP%s.' % j + 'EXTVER'
        dpnaxes = 'DP%s.' % j + 'NAXES'
        dpaxis1 = 'DP%s.' % j + 'AXIS.1'
        dpaxis2 = 'DP%s.' % j + 'AXIS.2'
        keys = [cperror, cpdis, dpext, dpnaxes, dpaxis1, dpaxis2]
        values = {cperror: 0.0, cpdis: 'Lookup', dpext: wdvarr_ver, dpnaxes: 2,
                  dpaxis1: 1, dpaxis2: 2}

        comments = {cperror: 'Maximum error of NPOL correction for axis %s' % j,
                    cpdis: 'Prior distortion funcion type',
                    dpext: 'Version number of WCSDVARR extension containing lookup distortion table',
                    dpnaxes: 'Number of independent variables in distortion function',
                    dpaxis1: 'Axis number of the jth independent variable in a distortion function',
                    dpaxis2: 'Axis number of the jth independent variable in a distortion function'
                    }

        for key in keys:
            hdr.update(key=key, value=values[key], comment=comments[key], before='HISTORY')

    @classmethod
    def getData(cls, nplfile, ccdchip):
        """
        Get the data arrays from the reference NPOL files
        Make sure 'CCDCHIP' in the npolfile matches "CCDCHIP' in the science file.

        Raises
        ------
        ValueError
            If the reference file has no DX/DY extensions for ``ccdchip``
            (previously this surfaced as an UnboundLocalError).
        """
        xdata = None
        ydata = None
        npl = pyfits.open(nplfile)
        try:
            for ext in npl:
                nplextname = ext.header.get('EXTNAME', "")
                nplccdchip = ext.header.get('CCDCHIP', 1)
                if nplccdchip != ccdchip:
                    continue
                if nplextname == 'DX':
                    xdata = ext.data.copy()
                elif nplextname == 'DY':
                    ydata = ext.data.copy()
        finally:
            npl.close()
        if xdata is None or ydata is None:
            raise ValueError("NPOLFILE %s does not contain DX/DY arrays "
                             "for chip %s" % (nplfile, ccdchip))
        return xdata, ydata

    @classmethod
    def transformData(cls, dx, dy, coeffs):
        """
        Transform the NPOL data arrays for use with SIP by applying the
        inverse first-order IDC matrix; shapes are preserved.
        """
        ndx, ndy = np.dot(coeffs, [dx.ravel(), dy.ravel()])
        ndx.shape = dx.shape
        ndy.shape = dy.shape
        return ndx, ndy

    @classmethod
    def getIDCCoeffs(cls, header):
        """
        Return a matrix of the scaled first order IDC coefficients,
        or None if they are not present in ``header``.
        """
        try:
            ocx10 = header['OCX10']
            ocx11 = header['OCX11']
            ocy10 = header['OCY10']
            ocy11 = header['OCY11']
            coeffs = np.array([[ocx11, ocx10], [ocy11, ocy10]], dtype=np.float32)
        except KeyError:
            logger.exception('\n\tFirst order IDCTAB coefficients are not available. \n\
            Cannot convert SIP to IDC coefficients.')
            return None
        try:
            idcscale = header['IDCSCALE']
        except KeyError:
            logger.exception("IDCSCALE not found in header - setting it to 1.")
            idcscale = 1

        return np.linalg.inv(coeffs / idcscale)

    @classmethod
    def createNpolHDU(cls, sciheader, npolfile=None, wdvarr_ver=1, npl_extname=None, data=None, ccdchip=1, binned=1):
        """
        Creates an HDU to be added to the file object.
        """
        hdr = cls.createNpolHdr(sciheader, npolfile=npolfile, wdvarr_ver=wdvarr_ver,
                                npl_extname=npl_extname, ccdchip=ccdchip, binned=binned)
        hdu = pyfits.ImageHDU(header=hdr, data=data)
        return hdu

    @classmethod
    def createNpolHdr(cls, sciheader, npolfile, wdvarr_ver, npl_extname, ccdchip, binned):
        """
        Creates a header for the WCSDVARR extension based on the NPOL reference file
        and sci extension header. The goal is to always work in image coordinates
        (also for subarrays and binned images). The WCS for the WCSDVARR extension
        is such that a full size npol table is created and then shifted or scaled
        if the science image is a subarray or binned image.

        Raises
        ------
        ValueError
            If no (EXTNAME, CCDCHIP) match is found in the reference file
            (previously this surfaced as an UnboundLocalError).
        """
        npol_header = None
        nplextname = None
        npl = pyfits.open(npolfile)
        for ext in npl:
            try:
                nplextname = ext.header['EXTNAME']
                nplextver = ext.header['EXTVER']
            except KeyError:
                continue
            nplccdchip = cls.get_ccdchip(npl, extname=nplextname, extver=nplextver)
            if nplextname == npl_extname and nplccdchip == ccdchip:
                npol_header = ext.header
                break
        npl.close()
        if npol_header is None:
            raise ValueError("NPOLFILE %s has no %s extension for chip %s" %
                             (npolfile, npl_extname, ccdchip))

        naxis = pyfits.getval(npolfile, ext=1, key='NAXIS')
        # NOTE(review): CCDCHIP is recorded as the extension name ('DX'/'DY'),
        # not the chip number - preserved from the original code, confirm
        # this is intended (the commented alternative was npol_header['CCDCHIP']).
        ccdchip = nplextname  # npol_header['CCDCHIP']

        # Comments for the per-axis WCS keywords of the distortion array.
        kw = {'NAXIS': 'Size of the axis',
              'CDELT': 'Coordinate increment along axis',
              'CRPIX': 'Coordinate system reference pixel',
              'CRVAL': 'Coordinate system value at reference pixel',
              }

        kw_comm1 = {}
        kw_val1 = {}
        for key in kw.keys():
            for i in range(1, naxis + 1):
                si = str(i)
                kw_comm1[key + si] = kw[key]

        for i in range(1, naxis + 1):
            si = str(i)
            kw_val1['NAXIS' + si] = npol_header.get('NAXIS' + si)
            kw_val1['CDELT' + si] = npol_header.get('CDELT' + si, 1.0)
            kw_val1['CRPIX' + si] = npol_header.get('CRPIX' + si, 0.0)
            kw_val1['CRVAL' + si] = npol_header.get('CRVAL' + si, 0.0)

        kw_comm0 = {'XTENSION': 'Image extension',
                    'BITPIX': 'IEEE floating point',
                    'NAXIS': 'Number of axes',
                    'EXTNAME': 'WCS distortion array',
                    'EXTVER': 'Distortion array version number',
                    'PCOUNT': 'Special data area of size 0',
                    'GCOUNT': 'One data group',
                    }

        kw_val0 = {'XTENSION': 'IMAGE',
                   'BITPIX': -32,
                   'NAXIS': naxis,
                   'EXTNAME': 'WCSDVARR',
                   'EXTVER': wdvarr_ver,
                   'PCOUNT': 0,
                   'GCOUNT': 1,
                   'CCDCHIP': ccdchip,
                   }

        cdl = pyfits.CardList()
        for key in kw_comm0.keys():
            cdl.append(pyfits.Card(key=key, value=kw_val0[key], comment=kw_comm0[key]))
        for key in kw_comm1.keys():
            cdl.append(pyfits.Card(key=key, value=kw_val1[key], comment=kw_comm1[key]))

        hdr = pyfits.Header(cards=cdl)

        return hdr

    @classmethod
    def get_ccdchip(cls, fobj, extname, extver):
        """
        Given a science file or npol file determine CCDCHIP.

        The identifying keyword depends on the instrument; defaults to 1
        for instruments without a chip/camera keyword.
        """
        ccdchip = 1
        instrument = fobj[0].header['INSTRUME']
        if instrument == 'ACS' and fobj[0].header['DETECTOR'] == 'WFC':
            ccdchip = fobj[extname, extver].header['CCDCHIP']
        elif instrument == 'WFC3' and fobj[0].header['DETECTOR'] == 'UVIS':
            ccdchip = fobj[extname, extver].header['CCDCHIP']
        elif instrument == 'WFPC2':
            ccdchip = fobj[extname, extver].header['DETECTOR']
        elif instrument == 'STIS':
            ccdchip = fobj[extname, extver].header['DETECTOR']
        elif instrument == 'NICMOS':
            ccdchip = fobj[extname, extver].header['CAMERA']
        return ccdchip
+ \ No newline at end of file
diff --git a/lib/stwcs/updatewcs/utils.py b/lib/stwcs/updatewcs/utils.py
new file mode 100644
index 0000000..29ba5f3
--- /dev/null
+++ b/lib/stwcs/updatewcs/utils.py
@@ -0,0 +1,28 @@
+from __future__ import division # confidence high
+
def diff_angles(a, b):
    """
    Perform angle subtraction a-b taking into account
    small-angle differences across 360degree line.
    """
    delta = a - b
    # Fold a single wrap across the 360-degree line into (-180, 180].
    if delta > 180.0:
        delta -= 360.0
    elif delta < -180.0:
        delta += 360.0
    return delta
+
def getBinning(fobj, extver=1):
    """Return the on-chip binning factor for a science extension."""
    primary = fobj[0].header
    if primary['INSTRUME'] == 'WFPC2':
        # WFPC2 signals 2x2 binning through MODE='AREA'.
        return 2 if primary.get('MODE', "") == 'AREA' else 1
    # Other instruments record the factor in the SCI header.
    return fobj['SCI', extver].header.get('BINAXIS', 1)
+