This article collects typical usage examples of the Python function scipy.io.idl.readsav. If you are wondering exactly how readsav is used, how to call it, or what real-world readsav code looks like, the hand-picked examples below should help.
Fifteen readsav code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
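Before the individual examples, here is a minimal sketch of the basic call. The file name my_data.sav is hypothetical, and `from scipy.io import readsav` is the current public import path for the same function that older code reaches via scipy.io.idl:

from scipy.io import readsav  # same function as scipy.io.idl.readsav in older SciPy

# Read an IDL .sav file into a plain dict keyed by the (lower-cased) IDL variable names.
data = readsav('my_data.sav', python_dict=True, verbose=False)
print(list(data.keys()))  # names of the variables stored in the save file

The examples that follow are excerpted from their original projects and assume those projects' own imports (numpy as np, matplotlib, astropy, pandas, and so on).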
Example 1: lee4sst
def lee4sst(fileplace1,fileplace2,fileplace3,fileplace4):
"""
Lee archivos '/path/to/file/bi1yymmdd.save' o
'/path/to/file/rs1yymmdd.hh00.save' y devuelve un
vector-tiempo (fds) y la estructura completa de da-
tos (biors) de dos archivos consecutivos y concatenados.
"""
    # Read the four consecutive .save files.
    biors1 = readsav(fileplace1, python_dict=False, verbose=False)
    biors2 = readsav(fileplace2, python_dict=False, verbose=False)
    biors3 = readsav(fileplace3, python_dict=False, verbose=False)
    biors4 = readsav(fileplace4, python_dict=False, verbose=False)
    # The observation date (yymmdd) sits at fixed positions in the first file path.
    yyyy = int(fileplace1[37:39]) + 2000
    mm = int(fileplace1[39:41])
    dd = int(fileplace1[41:43])
    obsday = int(datetime(yyyy, mm, dd).timestamp())  # midnight of the observing day, local time
biors1234= AttrDict({'pm_daz':[],'off':[],'azierr':[],'eleerr':[],'x_off':[],'elepos':[],'azipos':[],'pos_time':[],
'recnum':[],'opmode':[],'time':[],'pm_del':[],'gps_status':[],'adcval':[],'y_off':[],'target':[]})
for items in biors1:
a,b,c,d = biors1[items],biors2[items],biors3[items],biors4[items]
biors1234[items] = np.concatenate((a,b,c,d))
thatday = obsday + biors1234['time']/1.e4
    dts = list(map(datetime.fromtimestamp, thatday))  # map() must be materialized in Python 3
fds = dates.date2num(dts)
return fds,biors1234
Example 2: cos_lsf_new
def cos_lsf_new(lam,version):
if lam < 1800.0:
if lam > 1450.0:
chan ='g160m'
else:
chan = 'g130m'
else:
chan = 'g225m'
if (version == 'new' and ((chan == 'g130m') or (chan == 'g160m'))):
q = readsav('C:/Users/Will Evonosky/Dropbox/SOARS 2016/Programs/H2 Fluoresence Code/cos_lsf_new.idl')
#print '/Users/Matt/IDLWorkspace80/COS_FIT/cos_lsf_new2.idl'
else:
q = readsav('C:/Users/Will Evonosky/Dropbox/SOARS 2016/Programs/H2 Fluoresence Code/cos_lsf.idl')
#print '/Users/Matt/IDLWorkspace80/COS_FIT/cos_lsf2.idl'
chan = np.where(chan == q.lsfchan)
chan = int(chan[0])
#print np.shape(q.lsf)
lamind = np.argmin(np.abs(q.lsfwave[chan,:]-lam))
lsfstart = q.lsf[:,lamind,chan]
lsf = lsfstart.T
xarray = lam+q.lsfpix*0.001*q.lsfpixscale[chan]
return (lsf,xarray)
Author: wevonosky | Project: Molecular-Hydrogen-Fluorescence-Model | Lines: 26 | Source file: Model_Chi_Grid_Complete_July6.py
Example 3: ps_cut
def ps_cut(power_2d,power_diff,bin_file=None,thresh=1e7,kperp_min_cut=0.003,kperp_max_cut=0.05,kpara_min_cut=0.11,kpara_max_cut=1.08,coarse_band_extent=2):
d = readsav(power_2d)
diff = readsav(power_diff)
kperp = d['kperp_edges']
kx = diff['kx_mpc']
ky = diff['ky_mpc']
kz = diff['kz_mpc']
h = diff['hubble_param']
k = np.zeros((kx.size,kx.size))
for ii in range(kx.size):
for jj in range(ky.size):
k[jj][ii]=np.sqrt(kx[ii]**2+ky[jj]**2)
k=k.flatten()
#dat = organize_power(d,'power_3d')
#dif = organize_power(diff,'power_diff')
if bin_file is None:
print "find pixels using model"
q = np.logical_and(d['power']<thresh,d['power']>0)
else:
fbin = readsav(bin_file)
q = np.array(fbin['bin_1to2d']).astype(bool)
r = np.zeros((kz.size,kx.size*ky.size),dtype=bool)
n = np.digitize(k,kperp)
q[:,np.where(kperp[:-1] > kperp_max_cut)[0]] = False
q[:,np.where(kperp[1:] < kperp_min_cut)[0]] = False
q[np.where(kz > kpara_max_cut)[0]] = False
q[np.where(kz < kpara_min_cut)[0]] = False
q[:,12] = False
q[:,13] = False
q[:,21] = False
for ii in range(coarse_band_extent):
q[24+ii::24] = False
q[24-ii::24] = False
for nn in range(kperp.size-1):
if kperp[nn] > kperp_max_cut: continue
if kperp[nn] < kperp_min_cut: continue
for zi in range(kz.size):
if kz[zi] < kpara_min_cut: continue
if kz[zi] > kpara_max_cut: continue
if q[zi][nn]:
ind0 = np.where(n==nn+1)
r[zi][ind0[0]] = True
# return r
dif = diff['power_diff']*(h**3)
wgt = diff['weight_diff']
#wgt /= np.sum(wgt)
r = r.reshape(dif.shape)
ind = np.where(r)
cut = dif[ind]#*(wgt[ind]>10.*np.mean(wgt))
#n0ind = np.nonzero(cut)
kzpower = np.divide(np.sum(dif*wgt*r,axis=(1,2)),np.sum(wgt*r,axis=(1,2)))
return q,d['power']*(h**3), cut, wgt[ind], kzpower, kz/h
Example 4: read_cluster_grinder
def read_cluster_grinder(filepath):
    ''' Import Rob's Spitzer data

    Read Rob's IDL format and make it into a catalog,
    deleting multiple columns and adding identifiers.

    Parameters
    ----------
    filepath : string
        Path to a directory that holds the output of the ClusterGrinder
        pipeline. All files need to have standard names.
        Specifically, this routine reads:

        - ``cg_merged_srclist_mips.sav``
        - ``cg_classified.sav``

    Returns
    -------
    cat : astropy.table.Table
        Table with 2MASS and Spitzer magnitudes and the ClusterGrinder
        classification.
    '''
s = readsav(os.path.join(filepath, 'cg_merged_srclist_mips.sav'))
coo=np.ma.array(s.out[:,0:20],mask=(s.out[:,0:20] == 0.))
s.out[:,20:30][np.where(s.out[:,20:30] < -99)] = np.nan
s.out[:,30:40][np.where(s.out[:,30:40]==10)] = np.nan
dat = Table()
dat.add_column(Column(name='RA', data=np.ma.mean(coo[:,[0,2,4,12,14,16,18]],axis=1), unit = 'deg', format = '9.6g'))
    # RA is the average of all valid (non-zero) RA values in 2MASS JHK and IRAC 1-4
dat.add_column(Column(name='DEC', data=np.ma.mean(coo[:,[1,3,5,13,15,17,19]],axis=1), unit='deg', format='+9.6g'))
robsyukyformat={'J_MAG': 20,'H_MAG': 21, 'K_MAG': 22,'J_ERR': 30,
'H_ERR': 31,'K_ERR': 32,'IRAC_1': 26,'IRAC_2': 27,
'IRAC_3': 28, 'IRAC_4': 29,'IRAC_1_ERR':36,'IRAC_2_ERR':37,
'IRAC_3_ERR':38, 'IRAC_4_ERR':39}
for col in robsyukyformat:
dat.add_column(Column(name=col, data=s.out[:, robsyukyformat[col]], unit='mag', format='4.2g'))
s.mips[:,2][np.where(s.mips[:,2] == -100)] = np.nan
s.mips[:,3][np.where(s.mips[:,3] == 10)] = np.nan
dat.add_column(Column(name='MIPS', data=s.mips[:,2], unit='mag', format='4.2g'))
dat.add_column(Column(name='MIPS_ERR',data=s.mips[:,3], unit='mag', format='4.2g'))
IRclass = readsav(os.path.join(filepath, 'cg_classified.sav'))
dat.add_column(Column(name='IRclass', dtype='|S5', length=len(dat)))
for n1, n2 in zip(['wdeep', 'w1','w2','wtd','w3'], ['I*', 'I', 'II', 'II*', 'III']):
if n1 in IRclass:
dat['IRclass'][IRclass[n1]] = n2
dat.add_column(Column(name='AK', data=IRclass.ak, unit='mag', format='4.2g'))
return dat
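A hypothetical invocation of read_cluster_grinder, assuming a directory that contains the two .sav files named in the docstring (the path and output file name below are illustrative only):

cat = read_cluster_grinder('/data/clustergrinder/region1/')
print(cat.colnames)                            # RA, DEC, 2MASS/IRAC/MIPS magnitudes, IRclass, AK
cat.write('region1_cat.fits', overwrite=True)  # astropy Tables can be written to FITS directly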
Example 5: plotlimit
def plotlimit(f1,f2,sz=(10,10)):
fig,axs=plt.subplots(3,2,sharex=True,sharey=True,figsize=sz)
deor = readsav('/users/wl42/IDL/FHD/catalog_data/eor_power_1d.idlsave')
keor = deor['k_centers']
peor = deor['power']
z=['7.1','6.8','6.5']
pol=['E-W','N-S']
for ii in range(6):
        ix = ii // 2  # integer division for the row index (Python 3)
iy=ii%2
fb=f1[ii]
fl=f2[ii]
d=readsav(fl)
h=d['hubble_param']
k0=keor/h
p0=peor*keor**3/2/np.pi**2
kb,pb,pbup,sb=get_1d_limit(fb)
kl,pl,plup,sl=get_1d_limit(fl)
pl[np.where(pl==0)]=np.nan
axs[ix][iy].set_xlim(0.15,1.2)
axs[ix][iy].set_ylim(10,1e7)
axs[ix][iy].set_title('z='+z[ix]+' '+pol[iy])
axs[ix][iy].set_xscale('log')
axs[ix][iy].set_yscale('log')
m1,m2,m3=None,None,None
if iy==0: axs[ix][iy].set_ylabel('$\Delta^2$ ($mK^2$)')
if ix==2: axs[ix][iy].set_xlabel('$k$ ($h$ $Mpc^{-1}$)')
if ii==1:
m1='Fiducial Theory'
m2='2016 2-$\sigma$ Upper Limit'
m3='2016 Noise Level'
axs[ix][iy].step(k0,p0,where='mid',c='r',label=m1)
axs[ix][iy].step(kb,pbup,where='mid',c='c',label=m2)
axs[ix][iy].step(kb,sb,where='mid',c='c',linestyle='--',label=m3)
l1,l2,l3=None,None,None
if ii==0:
l1='Measured Power'
l2='Noise Level'
l3='2-$\sigma$ Upper Limit'
axs[ix][iy].step(kl,pl,where='mid',c='k',label=l1)
axs[ix][iy].step(kl,sl,where='mid',c='k',linestyle='--',label=l2)
axs[ix][iy].step(kl,plup,where='mid',c='indigo',label=l3)
axs[ix][iy].fill_between(xtostep(kl),ytostep(pl)-2*ytostep(sl),ytostep(plup),color='silver',alpha=0.8)
axs[ix][iy].grid(axis='y')
plt.subplots_adjust(top=0.94,bottom=0.12,left=0.07,right=0.965,hspace=0.21,wspace=0.005)
axs[0][0].legend(loc=2,ncol=3,fontsize='x-small')
axs[0][1].legend(loc=2,ncol=3,fontsize='x-small')
plt.show()
Example 6: load_matfile
def load_matfile(obj):
file=obj.path2fullpath(modename=modename,
pathname=pathname)
if file == '': return
### clean first
for name, child in obj.get_children():
child.destroy()
def idl2dict(dd, cls = collections.OrderedDict):
r = cls()
for name in dd.keys():
            print(name)
            print(isinstance(dd[name], np.recarray))
if isinstance(dd[name], np.recarray):
r[name] = rec2dict(dd[name], cls=cls)
else:
r._var0[name] = dd[name]
return r
nm0 = readsav(file)
if nm0 is None:
nm0 = IDLfile()
else:
nm0 = idl2dict(nm0, cls = IDLfile)
nm = IDLfile();nm['data'] = nm0
obj.setvar0(nm)
Example 7: load_gains_fhd
def load_gains_fhd(fhdsav):
fhd_cal = readsav(fhdsav,python_dict=True)
gfhd = {'x':{},'y':{}}
for a in range(fhd_cal['cal']['N_TILE'][0]):
gfhd['x'][a] = fhd_cal['cal']['GAIN'][0][0][a]
gfhd['y'][a] = fhd_cal['cal']['GAIN'][0][1][a]
return gfhd
Example 8: interpolate_along_trajectory
def interpolate_along_trajectory(latitude, longitude, description=None, fname=None):
"""Longitudes are necessarily positive east, as in the idlsaveile"""
if latitude.shape != longitude.shape:
raise ValueError('Shape mismatch')
if not fname:
fname = get_file()
data = readsav(fname, verbose=False)
    inx_lat = (np.floor(latitude) + 90).astype(int)
    inx_lon = np.floor(((longitude % 360.) + 360.) % 360.).astype(int)
if description:
img = data[description]
return img[inx_lat, inx_lon]
else:
output = dict()
for d in list(data.keys()):
if not 'percent' in d: continue
output[d] = data[d][inx_lat, inx_lon]
return output
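A hypothetical call to the routine above; the trajectory arrays, the map key 'percent_ice', and the file name surface_map.sav are all made up for illustration:

import numpy as np

lat = np.array([10.0, 12.5, 15.0])
lon = np.array([200.0, 201.0, 202.0])  # positive-east longitudes
vals = interpolate_along_trajectory(lat, lon,
                                    description='percent_ice',
                                    fname='surface_map.sav')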
Example 9: readsav
def readsav(radar,date,time,param,bandLim,procType,dataDir):
"""
*******************************
dataObj = readsav(radar,date,time,param,bandlim)
INPUTS:
OUTPUTS:
Written by Nathaniel 14AUG2012
*******************************
"""
    from scipy.io.idl import readsav
    import numpy as np
    import os.path
    import sys  # needed for sys.exit() below
dateSt = str(date[0])
timeSt = '.'.join(["%s" % el for el in time])
bandLim = np.array(bandLim) * 1.e6
bandSt = '-'.join(["%i" % el for el in bandLim])
a = [dateSt,radar,param,bandSt,procType,'sav']
fileName = '.'.join(a)
path = '/'.join([dataDir,fileName])
if os.path.exists(path):
dataObj = readsav(path)
return dataObj
else:
        print(' '.join([fileName, 'does not exist.']))
        sys.exit()
Example 10: restore_save
def restore_save(savfile):
n = readsav(savfile)
    key = list(n.keys())  # dict views are not indexable in Python 3
if (len(key) != 1):
exit(".sav file is not a combined end2end file")
num_bands = int(n[key[0]]['num_bands'][0])
bands = n[key[0]]['bands'][0]
dbs_data = n[key[0]]['dbs_data_combined'][0]
dbs_sims = n[key[0]]['dbs_sims_combined'][0] # (nsims, nspecs, nbands)
winminell = int(n[key[0]]['winminell'][0])
winmaxell = int(n[key[0]]['winmaxell'][0])
winfunc_data = n[key[0]]['winfunc_data_combined'][0]
winfunc_sims = n[key[0]]['winfunc_sims_combined'][0]
cov_sv = n[key[0]]['cov_sv_combined'][0]
cov_noise = n[key[0]]['cov_noise_combined'][0]
d = {'num_bands':num_bands, 'bands':bands,
'dbs_data':dbs_data, 'dbs_sims':dbs_sims,
'winminell':winminell, 'winmaxell':winmaxell,
'winfunc_data':winfunc_data, 'winfunc_sims':winfunc_sims,
'cov_sv':cov_sv, 'cov_noise':cov_noise}
return d
Example 11: idlToPandas
def idlToPandas(fileName, keyValue=0):
"""PURPOSE: To restore an IDL strcture contained
within an IDL save file and add it to a pandas
data frame."""
idlSavedVars = readsav(fileName)
#check if the keyValue passed in is actually an index
#rather than the keyValue name:
if valIsNumber(keyValue):
        keys = list(idlSavedVars.keys())  # materialize the view so it can be indexed
        keyValue = keys[keyValue]
struct = idlSavedVars[keyValue]
tags = []
for tag in struct.dtype.descr:
tags.append(tag[0][0])
#now take care of potential big-endian/little-endian issues
dt = struct.dtype
dt = dt.descr
for i in range(len(dt)):
if(dt[i][1][0] == '>' or dt[i][1][0] == '<'):
dt[i] = (dt[i][0], dt[i][1][1:])
struct = struct.astype(dt)
pdf = pd.DataFrame.from_records(struct, columns=tags)
return pdf
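A hypothetical call to idlToPandas; the save-file name and the structure name are made up, and an integer index (e.g. 0) can be passed instead of the name:

df = idlToPandas('observations.sav', 'obs_struct')
print(df.head())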
Example 12: test_arrays
def test_arrays(self):
s = readsav(path.join(DATA_PATH, 'struct_pointer_arrays.sav'), verbose=False)
assert_array_identical(s.arrays.g[0], np.repeat(np.float32(4.), 2).astype(np.object_))
assert_array_identical(s.arrays.h[0], np.repeat(np.float32(4.), 3).astype(np.object_))
assert_(np.all(vect_id(s.arrays.g[0]) == id(s.arrays.g[0][0])))
assert_(np.all(vect_id(s.arrays.h[0]) == id(s.arrays.h[0][0])))
assert_(id(s.arrays.g[0][0]) == id(s.arrays.h[0][0]))
Example 13: test_arrays_replicated_3d
def test_arrays_replicated_3d(self):
s = readsav(path.join(DATA_PATH, 'struct_arrays_replicated_3d.sav'), verbose=False)
# Check column types
assert_(s.arrays_rep.a.dtype.type is np.object_)
assert_(s.arrays_rep.b.dtype.type is np.object_)
assert_(s.arrays_rep.c.dtype.type is np.object_)
assert_(s.arrays_rep.d.dtype.type is np.object_)
# Check column shapes
assert_equal(s.arrays_rep.a.shape, (4, 3, 2))
assert_equal(s.arrays_rep.b.shape, (4, 3, 2))
assert_equal(s.arrays_rep.c.shape, (4, 3, 2))
assert_equal(s.arrays_rep.d.shape, (4, 3, 2))
# Check values
for i in range(4):
for j in range(3):
for k in range(2):
assert_array_identical(s.arrays_rep.a[i, j, k],
np.array([1, 2, 3], dtype=np.int16))
assert_array_identical(s.arrays_rep.b[i, j, k],
np.array([4., 5., 6., 7.],
dtype=np.float32))
assert_array_identical(s.arrays_rep.c[i, j, k],
np.array([np.complex64(1+2j),
np.complex64(7+8j)]))
                    assert_array_identical(s.arrays_rep.d[i, j, k],
                                           np.array([b"cheese", b"bacon", b"spam"],
                                                    dtype=object))
Example 14: get_positions
def get_positions():
pos = np.zeros((700, 2))
d = readsav('infohTEST_rj157.261')
    for i in range(700):
        print(i, end=' ')
        pos[i, 1] = d.infoarr[i]['pixel']
        pos[i, 0] = d.infoarr[i]['order']
    return pos  # assumed intended return value; the original excerpt stops after the loop
Example 15: test_null_pointer
def test_null_pointer():
"""
Regression test for null pointers.
"""
s = readsav(path.join(DATA_PATH, 'null_pointer.sav'), verbose=False)
assert_identical(s.point, None)
assert_identical(s.check, np.int16(5))