This article collects typical usage examples of the pyfits.open function in Python. If you have been wondering what pyfits.open does in practice, how to call it, or what real code that uses it looks like, the curated examples below should help.
The following 15 code examples all call open; by default they are ordered by popularity.
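Before the examples, here is a minimal sketch of the basic pyfits.open call pattern (the file name example.fits is hypothetical; pyfits is the predecessor of astropy.io.fits, so the same calls also work with "from astropy.io import fits"):

import pyfits

# Open a FITS file and get back an HDUList (a list-like collection of HDUs).
hdulist = pyfits.open('example.fits')
primary = hdulist[0]                     # primary HDU
print(primary.header.get('NAXIS'))       # read a header keyword
data = primary.data                      # image data as a numpy array (or None)
hdulist.close()

# The HDUList is also a context manager, which closes the file automatically.
with pyfits.open('example.fits', mode='readonly') as hdulist:
    data = hdulist[0].data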
Example 1: loadFiles
def loadFiles(
    imName = None,
    maskName = None,
    satMaskName = None,
    invertMask = False,
):
    """Load a new image and/or mask and/or satMask from a fits file.

    Inputs:
    - imName: path to image FITS file; None to use current image
    - maskName: path to bad pixel mask; 0=good unless invertMask is true;
        None to use current mask, if any
    - satMaskName: path to saturated pixel mask; 0=good regardless of invertMask;
        None to use current mask, if any
    """
    global im, imFits, mask, maskFits, satMask, satMaskFits, isSat, sd
    if imName:
        imFits = pyfits.open(imName)
        print("Loading image %s into imFits and im" % (imName,))
        dataSec = parseDataSec(imFits[0].header.get("DATASEC"))
        dataShape = imFits[0].data.shape
        if dataSec is None:
            dataSec = [0, dataShape[0], 0, dataShape[1]]
        im = imFits[0].data[dataSec[0]:dataSec[1], dataSec[2]:dataSec[3]]
    if maskName:
        print("Loading bad pixel mask %s into maskFits and mask" % (maskName,))
        maskFits = pyfits.open(maskName)
        mask = maskFits[0].data[dataSec[0]:dataSec[1], dataSec[2]:dataSec[3]] > 0.1
    if satMaskName:
        print("Loading saturated pixel mask %s into satMaskFits and satMask" % (satMaskName,))
        satMaskFits = pyfits.open(satMaskName)
        satMask = satMaskFits[0].data[dataSec[0]:dataSec[1], dataSec[2]:dataSec[3]] > 0.1
    return im, mask, satMask
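loadFiles depends on a parseDataSec helper that is not included in this snippet. Below is a hypothetical sketch of such a helper, assuming the usual IRAF-style '[x1:x2,y1:y2]' DATASEC string (1-based, inclusive) and the [row, col] slice order used above; it is not the original project's implementation.

import re

def parseDataSec(dataSecStr):
    """Return [begRow, endRow, begCol, endCol] slice limits, or None."""
    if not dataSecStr:
        return None
    match = re.match(r"\[(\d+):(\d+),(\d+):(\d+)\]", dataSecStr)
    if not match:
        return None
    x1, x2, y1, y2 = [int(val) for val in match.groups()]
    # FITS sections are 1-based and inclusive; numpy slices are 0-based,
    # exclusive at the end, and indexed [row, col].
    return [y1 - 1, y2, x1 - 1, x2]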
Example 2: plot_psi_weights
def plot_psi_weights(output,
                     modelfile='/d/monk/eigenbrot/WIYN/14B-0456/anal/models/allZ2_vardisp/allz2_vardisp_batch_interp.fits'):
    # Like the last page of all the fit plots, but for all pointings at once
    # cribbed from plot_bc_vardisp.py
    m = pyfits.open(modelfile)[1].data[0]

    numZ = np.unique(m['Z'][:,0]).size
    numAge = np.unique(m['AGE'][:,0]).size
    big_W = np.zeros((numZ,numAge))

    for p in range(6):
        coeffile = 'NGC_891_P{}_bin30_allz2.coef.fits'.format(p+1)
        print coeffile
        coef_arr = pyfits.open(coeffile)[1].data
        numap = coef_arr['VSYS'].size
        for i in range(numap):
            wdata = coef_arr[i]['LIGHT_FRAC'].reshape(numZ,numAge)
            big_W += wdata/np.max(wdata)

    bwax = plt.figure().add_subplot(111)
    bwax.imshow(big_W,origin='lower',cmap='Blues',interpolation='none')
    bwax.set_xlabel('SSP Age [Gyr]')
    bwax.set_xticks(range(numAge))
    bwax.set_xticklabels(m['AGE'][:numAge,0]/1e9)
    bwax.set_ylabel(r'$Z/Z_{\odot}$')
    bwax.set_yticks(range(numZ))
    bwax.set_yticklabels(m['Z'][::numAge,0])

    pp = PDF(output)
    pp.savefig(bwax.figure)
    pp.close()
    plt.close(bwax.figure)

    return
Example 3: combine_off_on
def combine_off_on(maskname, band, options, lampsOff=False):
    '''Combine a list of flats into a flat file.'''

    file_off = os.path.join("combflat_lamps_off_2d_%s.fits"
                            % (band))
    file_on = os.path.join("combflat_2d_%s.fits"
                           % (band))
    file_on_save = os.path.join("combflat_lamps_on_2d_%s.fits"
                                % (band))

    hdu_off = pyfits.open(file_off)
    hdu_on = pyfits.open(file_on)

    # save the lamps-on data set under a new name
    hdu_on.writeto(file_on_save, clobber=True)
    hdu_on[0].data = hdu_on[0].data - hdu_off[0].data

    # Add a history entry noting that the difference was completed
    hdu_on[0].header.add_history("Differenced the Lamps on and Lamps off images ")

    # save the differenced image under the original lamps-on name
    hdu_on.writeto(file_on, clobber=True)
Example 4: test_parnames_round_trip
def test_parnames_round_trip(self):
    """
    Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/130

    Ensures that opening a random groups file in update mode or writing it
    to a new file does not cause any change to the parameter names.
    """

    # Because this test tries to update the random_groups.fits file, let's
    # make a copy of it first (so that the file doesn't actually get
    # modified in the off chance that the test fails)
    self.copy_file('random_groups.fits')

    parameters = ['UU', 'VV', 'WW', 'BASELINE', 'DATE']
    with fits.open(self.temp('random_groups.fits'), mode='update') as h:
        assert h[0].parnames == parameters
        h.flush()

    # Open again just in read-only mode to ensure the parnames didn't
    # change
    with fits.open(self.temp('random_groups.fits')) as h:
        assert h[0].parnames == parameters
        h.writeto(self.temp('test.fits'))

    with fits.open(self.temp('test.fits')) as h:
        assert h[0].parnames == parameters
Example 5: fitstoarrays
def fitstoarrays(ffile, fmask):
    fitsfile = pyfits.open(ffile)
    data = fitsfile[0].data

    header = pyfits.getheader(ffile)
    naxis1 = header['naxis1']
    naxis2 = header['naxis2']
    cdelt1 = header['cdelt1']
    cdelt2 = header['cdelt2']
    crpix1 = header['crpix1']
    crpix2 = header['crpix2']
    crval1 = header['crval1']
    crval2 = header['crval2']

    X = zeros(data.shape)
    Y = zeros(data.shape)
    for j in range(data.shape[0]):
        for i in range(data.shape[1]):
            X[j,i] = (1+i)*cdelt1
            Y[j,i] = (1+j)*cdelt2

    maskfile = pyfits.open(fmask)
    datam = maskfile[0].data
    mask = datam != 0

    #Z = (X**2+Y**2)
    return X[mask], Y[mask], data[mask]
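The nested pixel loops above can be replaced by a vectorized version. Here is a sketch (not part of the original snippet) that uses numpy.indices and keeps the same (1+i)*cdelt convention:

import numpy as np
import pyfits

def fitstoarrays_vectorized(ffile, fmask):
    data = pyfits.open(ffile)[0].data
    header = pyfits.getheader(ffile)
    # np.indices returns the row (j) and column (i) index grids in one call.
    j, i = np.indices(data.shape)
    X = (1 + i) * header['cdelt1']
    Y = (1 + j) * header['cdelt2']
    mask = pyfits.open(fmask)[0].data != 0
    return X[mask], Y[mask], data[mask]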
Example 6: CoaddExposures
def CoaddExposures(exposures, outnumber, datapath='/nfs/lsst2/photocalData/data/observer2/', verbose=False, normalise=False):
    import pyfits as pf
    import shutil
    import sys
    N_HDUS = 70

    print 'Coadding %s'%(exposures); sys.stdout.flush()
    n_exp = float(len(exposures))

    filenames = [datapath + 'DECam_00' + str(_) + '.fits.fz' for _ in exposures]
    outfilename = datapath + 'DECam_0' + str(9000000 + outnumber) + '.fits.fz'
    shutil.copyfile(filenames[0], outfilename)

    out_file = pf.open(outfilename, mode='update')
    primaryHeader = out_file[0].header
    total_EXPTIME = primaryHeader['EXPTIME']
    total_EXPREQ = primaryHeader['EXPREQ']
    total_DARKTIME = primaryHeader['DARKTIME']

    # convert all arrays to floats for summing and dividing purposes
    if verbose: print 'loading first file & converting dtype'
    for hdu in range(1, N_HDUS+1):
        out_file[hdu].data = out_file[hdu].data.astype(np.float32)

    # add other files to the original, collecting relevant metadata
    for i, filename in enumerate(filenames[1:]):
        this_file = pf.open(filename)
        total_EXPTIME += this_file[0].header['EXPTIME']
        total_EXPREQ += this_file[0].header['EXPREQ']
        total_DARKTIME += this_file[0].header['DARKTIME']
        for hdu in range(1, N_HDUS+1):
            if verbose: print 'adding hdu %s for file %s of %s'%(hdu,i+2,n_exp)
            out_file[hdu].data += this_file[hdu].data

    # Normalise
    if normalise:
        for hdu in range(1, N_HDUS+1):
            if verbose: print 'Normalising hdu %s'%hdu
            out_file[hdu].data /= n_exp

    # Update headers
    primaryHeader['nCOADDED'] = n_exp
    primaryHeader['filename'] = 'DECam_0' + str(9000000 + outnumber) + '.fits'
    primaryHeader['expnum'] = 9000000 + outnumber
    primaryHeader['COADD_OF'] = str(['DECam_00' + str(_) for _ in exposures]).translate(None, ''.join(['[',']',' ','\'']))
    primaryHeader['COADNUMS'] = (str(exposures).translate(None, ''.join(['[',']',' '])))

    if not normalise: n_exp = 1.
    primaryHeader['NORMED'] = str(normalise)
    primaryHeader['EXP_TOT'] = total_EXPTIME    # always equal to the total exposure time
    primaryHeader['DARK_TOT'] = total_DARKTIME  # always equal to the total darktime
    primaryHeader['EXP_T_EQ'] = total_EXPTIME / n_exp    # equivalent exposure time, depending on normalisation
    primaryHeader['EXPREQ'] = total_EXPREQ / n_exp       # equivalent EXPREQ, depending on normalisation
    primaryHeader['DARKTIME'] = total_DARKTIME / n_exp   # equivalent DARKTIME, depending on normalisation

    if verbose: print 'Headers updated, writing to disk...'; sys.stdout.flush()
    out_file.flush()
    out_file.close()

    if verbose: print 'Finished coaddition of %s, written to %s'%(exposures, outfilename)
Example 7: test_save_backup
def test_save_backup(self):
    """Test for https://trac.assembla.com/pyfits/ticket/121

    Save backup of file before flushing changes.
    """

    self.copy_file('scale.fits')

    with ignore_warnings():
        with fits.open(self.temp('scale.fits'), mode='update',
                       save_backup=True) as hdul:
            # Make some changes to the original file to force its header
            # and data to be rewritten
            hdul[0].header['TEST'] = 'TEST'
            hdul[0].data[0] = 0

    assert os.path.exists(self.temp('scale.fits.bak'))
    with fits.open(self.data('scale.fits'),
                   do_not_scale_image_data=True) as hdul1:
        with fits.open(self.temp('scale.fits.bak'),
                       do_not_scale_image_data=True) as hdul2:
            assert hdul1[0].header == hdul2[0].header
            assert (hdul1[0].data == hdul2[0].data).all()

    with ignore_warnings():
        with fits.open(self.temp('scale.fits'), mode='update',
                       save_backup=True) as hdul:
            # One more time to see if multiple backups are made
            hdul[0].header['TEST2'] = 'TEST'
            hdul[0].data[0] = 1

    assert os.path.exists(self.temp('scale.fits.bak'))
    assert os.path.exists(self.temp('scale.fits.bak.1'))
Example 8: compute_DC
def compute_DC(pointing, folder, uw_chi):
    CI_file = glob('{}/*P{}*CI*.dat'.format(folder,pointing))[0]
    bestZ = np.loadtxt(CI_file, usecols=(5,), unpack=True, dtype=np.int)
    fzlist = ['0.005Z','0.02Z','0.2Z','0.4Z','1Z','2.5Z','allZ']

    hdu = pyfits.open('NGC_891_P{}_bin30.mso.fits'.format(pointing))[0]
    head = hdu.header
    data = hdu.data
    error = pyfits.open('NGC_891_P{}_bin30.meo.fits'.format(pointing))[0].data

    wave = (np.arange(data.shape[1]) - head['CRPIX1'] - 1)*head['CDELT1'] + head['CRVAL1']
    idx = np.where((wave >= 3800.) & (wave <= 6800.))[0]
    wave = wave[idx]
    data = data[:,idx]
    error = error[:,idx]

    outarr = np.zeros(data.shape[0])

    for i, bz in enumerate(bestZ):
        best_file = '{}/{}/NGC_891_P{}_bin30_allz2.fit.fits'.\
                    format(folder,fzlist[bz],pointing)
        print i+1, fzlist[bz]
        models = pyfits.open(best_file)[0].data
        coef_file = '{}/{}/NGC_891_P{}_bin30_allz2.coef.fits'.\
                    format(folder,fzlist[bz],pointing)
        coefs = pyfits.open(coef_file)[1].data

        chisq = np.sum((data[i,:] - models[i,:])**2/error[i,:]**2)/coefs['TOTFREE'][i]
        outarr[i] = uw_chi[i] - chisq

    return outarr
Example 9: skyvar
def skyvar(wave, sky = 'kecksky.fits'):
    if False:
        sky = pyfits.open("kecksky.fits")
        crval = sky[0].header['CRVAL1']
        delta = sky[0].header['CDELT1']
        sky_flux = sky[0].data[0]
        print "Keck Sky used"
    else:
        sky = pyfits.open("licksky.fits")
        crval = sky[0].header['CRVAL1']
        delta = sky[0].header['CDELT1']
        sky_flux = sky[0].data
        print "Lick sky used"

    start = crval - math.ceil(0.5*len(sky_flux)*delta)
    stop = crval + math.ceil(0.5*len(sky_flux)*delta)

    sky_wave = [(start+delta*i) for i in range(len(sky_flux))]

    plt.plot(sky_wave, sky_flux)
    plt.show()

    return new_sky    # NOTE: new_sky is never defined in this excerpt
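For reference, the conventional FITS linear dispersion solution gives the wavelength of 1-based pixel p as CRVAL1 + (p - CRPIX1) * CDELT1. Below is a small sketch of that convention (the snippet above instead centres the grid on CRVAL1, which is an assumption about how those particular sky files were written):

import numpy as np

def wavelength_axis(header, npix):
    """Wavelengths for pixels 1..npix from a linear dispersion header."""
    crval = header['CRVAL1']
    cdelt = header['CDELT1']
    crpix = header.get('CRPIX1', 1)
    return crval + (np.arange(npix) + 1 - crpix) * cdelt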
Example 10: fig11
def fig11():
    hdulist1 = pf.open('/import/phy-pc1064_a/Documents/SiOJets_New/run4ALMA/imageShock_J2-1_45deg_molcool_dirty.fits.gz')
    hdulist2 = pf.open('/import/phy-pc1064_a/Documents/SiOJets_New/run4ALMA/imageShock_J5-4_45deg_molcool_dirty.fits.gz')
    hdulist3 = pf.open('/import/phy-pc1064_a/Documents/SiOJets_New/run4ALMA/imageShock_J8-7_45deg_molcool_dirty.fits.gz')

    A = hdulist1[0].header
    RAarr = [A['CRVAL1'] - i*A['CDELT1'] for i in range(A['NAXIS1']/2)] + [A['CRVAL1'] + i*A['CDELT1'] for i in range(A['NAXIS1']/2,A['NAXIS1'])]
    DECarr = [A['CRVAL2'] - i*A['CDELT2'] for i in range(A['NAXIS2']/2)] + [A['CRVAL2'] + i*A['CDELT2'] for i in range(A['NAXIS2']/2,A['NAXIS2'])]
    RAarr2 = RA2ICRS(RAarr)
    DECarr2 = DEC2ICRS(DECarr)
    VELarr2 = Velarr(pi/4., 100.0, A)
    print VELarr2[0]

    clevs1 = [0.0005, 0.001, 0.005, 0.025, 0.125]
    clevs2 = [0.01, 0.05, 0.1, 0.2, 0.4, 0.8, 1.2]

    # Get ALMA images
    f1 = plt.figure(figsize=[11,8])
    plt.subplots_adjust(wspace=0.05)

    ax1 = f1.add_subplot(121)
    im1 = imshow(hdulist1[0].data[:,:,:].sum(0),origin='image',vmin = 0.001, vmax = 0.25,cmap=cm.gist_heat)
    plt.colorbar(im1,ticks=clevs2)
    im2 = contour(hdulist3[0].data[:,:,:].sum(0),levels=clevs2,colors='b',linewidths=.70)
    im3 = contour(hdulist2[0].data[:,:,:].sum(0),levels=clevs2,colors='g',linewidths=.70)
    axis([100.0,250.0,0.0,320.0])
    ax1.xaxis.set_major_locator(MaxNLocator(4))
    ax1.yaxis.set_major_locator(MaxNLocator(4))
    locs, labels = plt.xticks()
    plt.xticks(locs,[RAarr2[int(i)] for i in locs],rotation=15)
    locs, labels = plt.yticks()
    plt.yticks(locs[1:-1],[DECarr2[int(i)] for i in locs[1:-1]],rotation=90)
    ax1.set_ylabel('Declination [J2000]')
    ax1.set_xlabel('Right Ascension [J2000]')
    plt.figtext(0.16,0.2,r'Image [Jy/beam] : 2-$>$1',color='r')
    plt.figtext(0.16,0.175,r'Green Contour : 5-$>$4',color='g')
    plt.figtext(0.16,0.15,r'Blue Contour : 8-$>$7',color='b')

    ax2 = f1.add_subplot(122)
    im1b = imshow(hdulist1[0].data[:,:,175].T,origin='image', vmax = 0.025,cmap=cm.gist_heat)
    plt.colorbar(im1b,ticks=clevs1)
    im2b = contour(hdulist3[0].data[:,:,175].T,levels=clevs1,colors='b',linewidths=.70)
    im3b = contour(hdulist2[0].data[:,:,175].T,levels=clevs1,colors='g',linewidths=.70)
    axis([0.0,150.0,0.0,320.0])
    ax2.xaxis.set_major_locator(MaxNLocator(4))
    #ax2.yaxis.set_major_locator(MaxNLocator(4))
    locs, labels = plt.xticks()
    plt.xticks(locs,[str('%.1f'%VELarr2[int(i)]) for i in locs])
    plt.setp(ax2, yticks=[])
    ax2.set_xlabel(r'Velocity [km s$^{-1}$]')
    plt.figtext(0.53,0.2,r'PV Diagram [Jy/beam] : 2-$>$1',color='r')
    plt.figtext(0.53,0.175,r'Green Contour : 5-$>$4',color='g')
    plt.figtext(0.53,0.15,r'Blue Contour : 8-$>$7',color='b')

    plt.show()
Example 11: FixTHINGS
def FixTHINGS(imageIn, imageOut):
    print
    sys.stdout.write('Fixing file %s ... ' % imageIn)
    sys.stdout.flush()

    if imageOut != imageIn:
        hdu = pf.open(imageIn)
    else:
        hdu = pf.open(imageIn, mode='update')

    dataNew = hdu[0].data[0,0,:,:]
    del hdu[0].header['CTYPE3']; del hdu[0].header['CDELT3']; del hdu[0].header['CRVAL3']
    del hdu[0].header['CRPIX3']; del hdu[0].header['CROTA3']
    del hdu[0].header['CTYPE4']; del hdu[0].header['CDELT4']; del hdu[0].header['CRVAL4']
    del hdu[0].header['CRPIX4']; del hdu[0].header['CROTA4']

    if imageOut != imageIn:
        if os.path.exists(imageOut): os.remove(imageOut)
        pf.writeto(imageOut, dataNew, hdu[0].header)
    else:
        hdu[0].data = dataNew
        hdu.flush()

    print 'Done'
    print
    return
Example 12: test_hdu_fromstring
def test_hdu_fromstring(self):
    """
    Tests creating a fully-formed HDU object from a string containing the
    bytes of the HDU.
    """

    dat = open(self.data('test0.fits'), 'rb').read()

    offset = 0
    with fits.open(self.data('test0.fits')) as hdul:
        hdulen = hdul[0]._data_offset + hdul[0]._data_size
        hdu = fits.PrimaryHDU.fromstring(dat[:hdulen])
        assert isinstance(hdu, fits.PrimaryHDU)
        assert hdul[0].header == hdu.header
        assert hdu.data is None

    hdu.header['TEST'] = 'TEST'
    hdu.writeto(self.temp('test.fits'))
    with fits.open(self.temp('test.fits')) as hdul:
        assert isinstance(hdu, fits.PrimaryHDU)
        assert hdul[0].header[:-1] == hdu.header[:-1]
        assert hdul[0].header['TEST'] == 'TEST'
        assert hdu.data is None

    with fits.open(self.data('test0.fits')) as hdul:
        for ext_hdu in hdul[1:]:
            offset += hdulen
            hdulen = len(str(ext_hdu.header)) + ext_hdu._data_size
            hdu = fits.ImageHDU.fromstring(dat[offset:offset + hdulen])
            assert isinstance(hdu, fits.ImageHDU)
            assert ext_hdu.header == hdu.header
            assert (ext_hdu.data == hdu.data).all()
Example 13: test_uint
def test_uint(self):
    hdulist_f = fits.open(self.data('o4sp040b0_raw.fits'))
    hdulist_i = fits.open(self.data('o4sp040b0_raw.fits'), uint=True)

    assert hdulist_f[1].data.dtype == np.float32
    assert hdulist_i[1].data.dtype == np.uint16
    assert np.all(hdulist_f[1].data == hdulist_i[1].data)
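For context, uint=True asks pyfits to reinterpret integer data stored with the common BZERO = 32768 offset as unsigned 16-bit integers rather than scaling it to floats. A minimal round-trip sketch under that assumption (the file name uint_demo.fits is made up):

import numpy as np
import pyfits

# Writing uint16 data: pyfits stores it as int16 plus a BZERO = 32768 offset.
pyfits.PrimaryHDU(np.arange(4, dtype=np.uint16)).writeto('uint_demo.fits', clobber=True)

# Reading it back with uint=True restores the unsigned view; without it the
# scaled values come back as floats.
with pyfits.open('uint_demo.fits', uint=True) as hdulist:
    print(hdulist[0].data.dtype)   # uint16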
Example 14: plant
def plant(image, psf, outfile, list, dtime):
    import pyfits, os
    import numarray as N

    psf_f = pyfits.open(psf)
    psf_flux = psf_f[0].data.sum()
    psf_x_size = psf_f[0].header.get('NAXIS1',0)
    psf_y_size = psf_f[0].header.get('NAXIS2',0)
    psf_x = psf_f[0].header.get('PSF_X',0)
    psf_y = psf_f[0].header.get('PSF_Y',0)
    psf_mag = psf_f[0].header.get('PSFMAG',26.0)

    image_f = pyfits.open(image)
    xmax = image_f[0].header.get('NAXIS1',0)
    ymax = image_f[0].header.get('NAXIS2',0)
    exptime = image_f[0].header.get('EXPTIME',1)
    zeropoint = image_f[0].header.get('PHOT_C',26.5)

    import mop_files
    ahdu = mop_files.read(list)

    import string, math, re
    from numarray.nd_image.interpolation import shift as shift
    from string import atof

    for i in range(len(ahdu['data']['x'])):
        x = float(ahdu['data']['x'][i])
        y = float(ahdu['data']['y'][i])
        mag = float(ahdu['data']['mag'][i])
        rate = float(ahdu['data']['pix_rate'][i])/3600.0
        angle = float(ahdu['data']['angle'][i])
        x_shift_rate = rate*math.cos(angle/57.3)
        y_shift_rate = rate*math.sin(angle/57.3)
        #flux = exptime*10**((zeropoint-mag)/2.5)
        #scale = flux/psf_flux
        scale = 10**((psf_mag-mag)/2.5)*exptime
        #print scale
        niter = int(rate*exptime)+1
        scale = scale/niter
        dt = exptime/niter
        #print x,y,mag,niter
        for i in range(niter):
            curtime = dtime+dt*i
            x = x+x_shift_rate*curtime
            y = y+y_shift_rate*curtime
            x1 = int(max(0,x-psf_x))
            x2 = int(min(xmax,x+psf_x_size-psf_x))
            y1 = int(max(0,y-psf_y))
            y2 = int(min(ymax,y+psf_y_size-psf_y))
            #print x2,x1,y2,y1
            px1 = int((psf_x-(x-x1)))
            px2 = int(px1+(x2-x1))
            py1 = int(psf_y-(y-y1))
            py2 = int(py1+(y2-y1))
            sec = psf_f[0].data[py1:py2,px1:px2].copy()
            sec = shift(sec,(y-int(y),x-int(x)),order=3)
            #print sec.shape,y2-y1,x2-x1
            #print "Adding @ ",x,y,mag,scale," data=> ",y1,y2,x1,x2," PSF=> ",py1,py2,px1,px2
            image_f[0].data[y1:y2,x1:x2] += scale*sec

    image_f.writeto(outfile)
    image_f.close()
Example 15: Select
def Select(dirin, filein, selfn, dirout,
           overwrite=False):
    """Select a subsample from a table.

    If overwrite is False, will skip existing files in destination
    directory.
    """

    mask = [selfn(row) for row in pyfits.open(filein)[1].data]
    mind = [x for x, f in enumerate(mask) if f]
    nrows = len(mind)
    print "Selected %i rows." % nrows

    fitsel = re.compile(".*fits?")
    flist = [fnamein for fnamein in os.listdir(dirin) if fitsel.match(fnamein)]

    for fnamein in flist:
        foutname = os.path.join(dirout, fnamein)
        if os.access(foutname, os.F_OK) and (not overwrite):
            print "Skipping %s as it already exists" % fnamein
        else:
            fin = pyfits.open(os.path.join(dirin, fnamein))
            newtab = pyfits.new_table(fin[1].columns, nrows=nrows)
            for cname in fin[1].columns.names:
                newtab.data.field(cname)._copyFrom(fin[1].data.field(cname)[mind])
            Write([pyfits.PrimaryHDU(), newtab],
                  foutname,
                  overwrite=1)
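pyfits.new_table and the _copyFrom method used above are legacy APIs. In current astropy.io.fits the same row selection can be written more directly by fancy-indexing the table data; a rough sketch under that assumption, reusing dirin, fnamein, foutname, and the precomputed index list mind from the function above, and leaving out the local Write helper:

from astropy.io import fits
import os

with fits.open(os.path.join(dirin, fnamein)) as fin:
    # Indexing a FITS_rec with a list of row indices returns a new FITS_rec.
    newtab = fits.BinTableHDU(data=fin[1].data[mind])
    fits.HDUList([fits.PrimaryHDU(), newtab]).writeto(foutname, overwrite=True)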