This article collects typical usage examples of the PALutils.createTimeLags method in Python. If you are wondering what PALutils.createTimeLags does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples from the PALutils module itself.
Seven code examples of the PALutils.createTimeLags method are shown below, sorted by popularity by default.
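Every example below feeds the output of PALutils.createTimeLags straight into PALutils.createRedNoiseCovarianceMatrix, so it helps to keep a rough mental model of the function. The stand-in below is an illustration only (an assumption about the interface, not the library's actual source): a matrix of absolute time differences between two TOA vectors, optionally rounded.

import numpy as np

def createTimeLags_sketch(toa1, toa2, round=False):
    # Illustrative stand-in for PALutils.createTimeLags -- an assumption, not the real code.
    # Returns the matrix of absolute differences |t_i - t_j| between two TOA arrays.
    tm = np.abs(toa1[:, None] - toa2[None, :])
    if round:
        # the real library's rounding convention may differ; here we snap lags to whole days
        tm = np.round(tm / 86400.0) * 86400.0
    return tm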
Example 1: optStat
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this example also assumes: import numpy as np)
def optStat(psr, ORF, gam=4.33333):
    """
    Computes the Optimal statistic as defined in Chamberlin, Creighton, Demorest et al. (2013).

    @param psr: List of pulsar object instances
    @param ORF: Vector of pairwise overlap reduction values
    @param gam: Power spectral index of GWB (default = 13/3, i.e. SMBHBs)

    @return: Opt: Optimal statistic value (A_gw^2)
    @return: sigma: 1-sigma uncertainty on Optimal statistic
    @return: snr: signal-to-noise ratio of cross correlations
    """

    # TODO: maybe compute ORF in code instead of reading it in. Would be less
    # of a risk but a bit slower...

    k = 0
    npsr = len(psr)
    top = 0
    bot = 0
    for ll in range(0, npsr):
        for kk in range(ll+1, npsr):

            # form matrix of TOA time lags for this pulsar pair
            tm = PALutils.createTimeLags(psr[ll].toas, psr[kk].toas)

            # create cross covariance matrix without overall amplitude A^2
            SIJ = ORF[k]/2 * PALutils.createRedNoiseCovarianceMatrix(tm, 1, gam)

            # construct numerator and denominator of optimal statistic
            bot += np.trace(np.dot(psr[ll].invCov, np.dot(SIJ, np.dot(psr[kk].invCov, SIJ.T))))
            top += np.dot(psr[ll].res, np.dot(psr[ll].invCov, np.dot(SIJ,
                          np.dot(psr[kk].invCov, psr[kk].res))))
            k += 1

    # compute optimal statistic
    Opt = top/bot

    # compute uncertainty
    sigma = 1/np.sqrt(bot)

    # compute SNR
    snr = top/np.sqrt(bot)

    # return optimal statistic, uncertainty, and snr
    return Opt, sigma, snr
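Restated as equations, the quantities accumulated in the loop above give the optimal statistic, its 1-sigma uncertainty, and the SNR as

\[
\hat{A}^2 = \frac{\sum_{a<b} r_a^{T} C_a^{-1} \tilde{S}_{ab} C_b^{-1} r_b}
                 {\sum_{a<b} \operatorname{tr}\!\big[ C_a^{-1} \tilde{S}_{ab} C_b^{-1} \tilde{S}_{ab}^{T} \big]},
\qquad
\sigma = \Big( \sum_{a<b} \operatorname{tr}\!\big[ C_a^{-1} \tilde{S}_{ab} C_b^{-1} \tilde{S}_{ab}^{T} \big] \Big)^{-1/2},
\qquad
\mathrm{SNR} = \hat{A}^2 / \sigma,
\]

where r_a is psr[ll].res, C_a^{-1} is psr[ll].invCov, and \tilde{S}_{ab} = (\Gamma_{ab}/2) times the unit-amplitude red-noise covariance built from the time-lag matrix, with \Gamma_{ab} the overlap reduction values in ORF.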
Example 2: crossPower
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this example also assumes: import numpy as np)
def crossPower(psr, gam=13.0/3.0):
    """
    Compute the cross power as defined in Eq. 9, and its uncertainty from Eq. 10, of
    Demorest et al. (2012).

    @param psr: List of pulsar object instances
    @param gam: Power spectral index of GWB

    @return: vector of cross power for each pulsar pair
    @return: vector of cross power uncertainties for each pulsar pair
    """

    # initialization
    npsr = len(psr)

    # now compute cross power
    rho = []
    sig = []
    xi = []   # unused in this snippet
    for ll in range(npsr):
        for kk in range(ll+1, npsr):

            # matrix of time lags
            tm = PALutils.createTimeLags(psr[ll].toas, psr[kk].toas)

            # create cross covariance matrix without overall amplitude A^2
            SIJ = PALutils.createRedNoiseCovarianceMatrix(tm, 1, gam)

            # construct numerator and denominator of the pair cross power
            bot = np.trace(np.dot(psr[ll].invCov, np.dot(SIJ, np.dot(psr[kk].invCov, SIJ.T))))
            top = np.dot(psr[ll].res, np.dot(psr[ll].invCov, np.dot(SIJ,
                         np.dot(psr[kk].invCov, psr[kk].res))))

            # cross correlation and uncertainty
            rho.append(top/bot)
            sig.append(1/np.sqrt(bot))

    return np.array(rho), np.array(sig)
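Because crossPower leaves the overlap reduction function out of SIJ, a single GWB amplitude estimate is usually obtained afterwards by a weighted least-squares fit of the pair cross powers against the Hellings-Downs values for each pair. The sketch below is hedged: the helper name, the variable zeta, and the fitting step are illustrative and not part of the example above.

import numpy as np

# rho, sig = crossPower(psrs)    # psrs: list of pulsar objects as in the example above
# zeta = ...                     # Hellings-Downs coefficient per pulsar pair, in the same
#                                # pair ordering as rho and sig

def fit_amplitude(rho, sig, zeta):
    # weighted least-squares fit of rho_ab ~ A^2 * zeta_ab with weights 1/sig_ab^2
    # (a standard combination step; an assumption here, not taken from the example)
    w = 1.0 / sig**2
    A2 = np.sum(w * zeta * rho) / np.sum(w * zeta**2)
    A2_err = 1.0 / np.sqrt(np.sum(w * zeta**2))
    return A2, A2_err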
Example 3: enumerate
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this snippet also assumes: import numpy as np, import scipy.linalg as sl, and a
#  list `psr` of pulsar objects carrying toas, err, res, G, invCov and noise attributes)
logdetTerm = []
for ct, p in enumerate(psr):

    efac = p.efac
    equad = p.equad
    Amp = p.Amp
    gam = p.gam
    fH = p.fH

    print(p.name, efac, equad, Amp, gam)

    # get white noise covariance matrix
    white = PALutils.createWhiteNoiseCovarianceMatrix(p.err, efac, equad)

    # get red noise covariance matrix
    tm = PALutils.createTimeLags(p.toas, p.toas, round=True)
    red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)

    # project the full covariance with the timing-model G matrix and take its Cholesky factor
    C = np.dot(p.G.T, np.dot(red + white, p.G))
    cf = sl.cho_factor(C)
    logdetTerm.append(np.sum(2 * np.log(np.diag(cf[0]))))  # + p.G.shape[1]*np.log(2*np.pi)

# get null model log-likelihood
nullLike = 0
for ct, p in enumerate(psr):
    nullLike += -0.5 * logdetTerm[ct]
    nullLike += -0.5 * np.dot(p.res, np.dot(p.invCov, p.res))

print("Null Like = {0}".format(nullLike))
Example 4: addInverseCovFromNoiseFile
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this method also assumes: import os, import numpy as np, h5py imported as h5,
#  and a tempo2 python interface imported as t2)
def addInverseCovFromNoiseFile(self, parfile, timfile, noisefile, DMOFF=None, DMXOFF=None, dailyAverage=False):
    """
    Add noise covariance matrix after timing model subtraction.
    """

    # Check whether the two files exist
    if not os.path.isfile(parfile) or not os.path.isfile(timfile):
        raise IOError("Cannot find parfile (%s) or timfile (%s)!" % (parfile, timfile))

    assert self.filename is not None, "ERROR: HDF5 file not set!"

    # 'a' means: read/write if exists, create otherwise
    self.h5file = h5.File(self.filename, 'a')

    # Retrieve the data group; it must already exist
    if "Data" in self.h5file:
        datagroup = self.h5file["Data"]
    else:
        raise IOError("Cannot add noise parameters if the Data group does not exist!")

    # Load pulsar data through the tempo2 python interface
    t2pulsar = t2.tempopulsar(parfile, timfile)

    # turn off DMMODEL fitting
    if DMOFF is not None:
        t2pulsar['DMMODEL'].fit = False

    # turn off DMX fitting
    if DMXOFF is not None:
        DMXFlag = False
        print('Turning off DMX fitting and turning DM fitting on')
        for par in t2pulsar.pars:
            if 'DMX' in par:
                t2pulsar[par].fit = False
                t2pulsar['DM'].fit = True
                DMXFlag = True

        if not DMXFlag:
            print('NO DMX for pulsar {0}'.format(t2pulsar.name))

    # refit 5 times to make sure we are converged
    t2pulsar.fit(iters=5)

    # Retrieve the pulsars group; it must already exist
    if "Pulsars" in datagroup:
        pulsarsgroup = datagroup["Pulsars"]
    else:
        raise IOError("Cannot add noise parameters if the pulsar group does not exist!")

    # Look up the name of the pulsar, and see if it exists
    if t2pulsar.name in pulsarsgroup:
        pass
    else:
        raise IOError("%s must already exist in %s to add noise parameters!"
                      % (t2pulsar.name, self.filename))

    pulsarsgroup = pulsarsgroup[t2pulsar.name]

    # first create G matrix from design matrix and toas
    designmatrix = np.double(t2pulsar.designmatrix())
    toas = np.double(t2pulsar.toas()*86400)
    errs = np.double(t2pulsar.toaerrs*1e-6)

    # if doing daily averaging
    if dailyAverage:

        # get average quantities
        toas, qmatrix, errs, dmatrix, freqs, bands = PALutils.dailyAverage(t2pulsar)

        # construct new daily averaged residuals and designmatrix
        toas *= 86400
        designmatrix = np.dot(qmatrix, dmatrix)

    G = PALutils.createGmatrix(designmatrix)

    # create matrix of time lags
    tm = PALutils.createTimeLags(toas, toas, round=True)

    # now read the noise file to get the model and its parameters
    noisedata = open(noisefile, 'r')
    fH = None
    tau = None
    DMAmp = None
    DMgam = None
    for line in noisedata.readlines():

        # default parameters for different models other than pure PL
        key = line.split()[0]

        # get amplitude
        if "Amp" == key:
            Amp = float(line.split()[-1])

        # get spectral index
        elif "gam" == key:
            gam = float(line.split()[-1])

        # get efac
# ......... remainder of this example omitted .........
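Judging from the parsing loop above, the noise file is a plain-text list of "key value" lines: Amp and gam for a pure power-law model, with fH, tau, DMAmp, and DMgam apparently optional (they are pre-initialized to None), and efac hinted at just before the omitted portion. A hypothetical file consistent with that parser, with made-up values purely for illustration:

Amp    5.0e-14
gam    4.33
efac   1.1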
Example 5: firstOrderLikelihood
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this example also assumes: import numpy as np, import scipy.linalg as sl)
def firstOrderLikelihood(psr, ORF, Agw, gamgw, Ared, gred, efac, equad,
                         interpolate=False):
    """
    Compute the value of the first-order likelihood as defined in
    Ellis, Siemens, van Haasteren (2013).

    @param psr: List of pulsar object instances
    @param ORF: Vector of pairwise overlap reduction values
    @param Agw: Amplitude of GWB in standard strain amplitude units
    @param gamgw: Power spectral index of GWB
    @param Ared: Vector of amplitudes of intrinsic red noise in GWB strain units
    @param gred: Vector of power spectral indices of intrinsic red noise
    @param efac: Vector of efacs
    @param equad: Vector of equads
    @param interpolate: Boolean to perform interpolation only with compressed
                        data. (default = False)

    @return: Log-likelihood value
    """
    npsr = len(psr)
    loglike = 0
    tmp = []

    # start loop to evaluate auto-terms
    for ll in range(npsr):

        r1 = np.dot(psr[ll].G.T, psr[ll].res)

        # create time lags
        tm = PALutils.createTimeLags(psr[ll].toas, psr[ll].toas)

        # TODO: finish option to do interpolation when using compression

        # calculate auto GW covariance matrix
        SC = PALutils.createRedNoiseCovarianceMatrix(tm, Agw, gamgw)

        # calculate auto red noise covariance matrix
        SA = PALutils.createRedNoiseCovarianceMatrix(tm, Ared[ll], gred[ll])

        # create white noise covariance matrix
        # TODO: add ability to use multiple efacs for different backends
        white = PALutils.createWhiteNoiseCovarianceMatrix(psr[ll].err, efac[ll], equad[ll])

        # total auto-covariance matrix
        P = SC + SA + white

        # sandwich with G matrices
        Ppost = np.dot(psr[ll].G.T, np.dot(P, psr[ll].G))

        # do cholesky solve
        cf = sl.cho_factor(Ppost)

        # solution vector P^-1 r
        rr = sl.cho_solve(cf, r1)

        # temporarily store P^-1 r, mapped back to the full residual space
        tmp.append(np.dot(psr[ll].G, rr))

        # add to log-likelihood
        loglike += -0.5 * (np.sum(np.log(2*np.pi*np.diag(cf[0])**2)) + np.dot(r1, rr))

    # now compute cross terms
    k = 0
    for ll in range(npsr):
        for kk in range(ll+1, npsr):

            # create time lags
            tm = PALutils.createTimeLags(psr[ll].toas, psr[kk].toas)

            # create cross covariance matrix
            SIJ = PALutils.createRedNoiseCovarianceMatrix(tm, 1, gamgw)

            # carry out matrix-vector operations
            tmp1 = np.dot(SIJ, tmp[kk])

            # add to likelihood
            loglike += ORF[k]/2 * Agw**2 * np.dot(tmp[ll], tmp1)

            # increment ORF counter
            k += 1

    return loglike
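Schematically (leaving the G-matrix projections implicit), the two loops evaluate

\[
\ln L \;\simeq\; \sum_a \Big[ -\tfrac{1}{2}\ln\det\!\big(2\pi P_a\big) \;-\; \tfrac{1}{2}\, r_a^{T} P_a^{-1} r_a \Big]
\;+\; \sum_{a<b} \frac{\Gamma_{ab}}{2}\, A_{\mathrm{gw}}^{2}\; r_a^{T} P_a^{-1} \tilde{S}_{ab} P_b^{-1} r_b ,
\]

where P_a = SC + SA + white is each pulsar's auto covariance, \tilde{S}_{ab} the unit-amplitude cross covariance, and \Gamma_{ab} the overlap reduction values in ORF. The vectors stored in tmp cache P_a^{-1} r_a from the auto-term loop, so each cross term costs only one matrix-vector product.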
Example 6:
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this snippet also assumes: import numpy as np, an open HDF5 group `pulsargroup`,
#  a list `psr` of pulsar objects, and `ct` as the index of the current pulsar,
#  advanced alongside `key`)
Dmatrix = []
print('Computing diagonalized auto-covariance matrices')
for key in pulsargroup:

    # get noise values from file TODO: change this to read directly from pulsar class
    Amp = pulsargroup[key]['Amp'].value
    gam = pulsargroup[key]['gam'].value
    efac = pulsargroup[key]['efac'].value
    equad = pulsargroup[key]['equad'].value
    try:
        fH = pulsargroup[key]['fH'].value
    except KeyError:
        fH = None

    # make covariance matrix
    tm = PALutils.createTimeLags(psr[ct].toas, psr[ct].toas, round=True)
    red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)
    white = PALutils.createWhiteNoiseCovarianceMatrix(psr[ct].err, efac, equad)

    # sandwich with G matrices
    cov = red + white
    noiseCov = np.dot(psr[ct].G.T, np.dot(cov, psr[ct].G))

    # diagonalize GW covariance matrix and noise covariance matrix
    L = np.linalg.cholesky(noiseCov)
    Linv = np.linalg.inv(L)

    # get generalized GW covariance matrix with Amp = 1
    redGW = PALutils.createRedNoiseCovarianceMatrix(tm, 1, 4.33333)
    redGW = np.dot(psr[ct].G.T, np.dot(redGW, psr[ct].G))
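The excerpt stops after forming Linv. A plausible continuation inside the same loop (an assumption for illustration, not shown in the original) is to whiten the unit-amplitude GW covariance with the noise Cholesky factor and diagonalize it, which is presumably what Dmatrix collects:

    # illustrative continuation of the loop body above -- not part of the original example
    whitened = np.dot(Linv, np.dot(redGW, Linv.T))   # L^-1 S_gw L^-T, symmetric by construction
    evals, evecs = np.linalg.eigh(whitened)          # diagonalize the whitened GW covariance
    Dmatrix.append(evals)                            # store the per-pulsar diagonalized spectrum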
Example 7: enumerate
# Required module: import PALutils [as alias]
# Or: from PALutils import createTimeLags [as alias]
# (this snippet also assumes: import numpy as np, import scipy.linalg as sl; as in
#  Example 3, the first block sits inside `for ct, p in enumerate(psr):` with
#  `logdetTerm = []` initialized beforehand)
efac = p.efac
equad = p.equad
Amp = p.Amp
gam = p.gam
fH = p.fH

# efac = 1.0
# equad = 0
# Amp = 3e-15
# gam = 4.33

# get white noise covariance matrix
white = PALutils.createWhiteNoiseCovarianceMatrix(p.err, efac, equad)

# get red noise covariance matrix
tm = PALutils.createTimeLags(p.toas, p.toas)
red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)

C = np.dot(p.G.T, np.dot(red + white, p.G))
cf = sl.cho_factor(C)
logdetTerm.append(np.sum(2 * np.log(np.diag(cf[0]))))  # + p.G.shape[1]*np.log(2*np.pi)

# store the G-projected inverse covariance on the pulsar object
invCov = np.dot(p.G, np.dot(np.linalg.inv(C), p.G.T))
p.invCov = invCov

# get null model log-likelihood
nullLike = 0
for ct, p in enumerate(psr):
    nullLike += -0.5 * logdetTerm[ct]
    nullLike += -0.5 * np.dot(p.res, np.dot(p.invCov, p.res))
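The only addition relative to Example 3 is the explicit construction of p.invCov. In matrix form it replaces the true inverse covariance by

\[
C_p^{-1} \;\rightarrow\; G_p \big( G_p^{T} C_p G_p \big)^{-1} G_p^{T},
\]

i.e. the inverse of the timing-model-projected covariance mapped back to the full residual space. This is the invCov that Examples 1 through 3 consume via psr[ll].invCov and p.invCov.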