本文整理汇总了Python中PALutils.createWhiteNoiseCovarianceMatrix方法的典型用法代码示例。如果您正苦于以下问题:Python PALutils.createWhiteNoiseCovarianceMatrix方法的具体用法?Python PALutils.createWhiteNoiseCovarianceMatrix怎么用?Python PALutils.createWhiteNoiseCovarianceMatrix使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类PALutils
的用法示例。
在下文中一共展示了PALutils.createWhiteNoiseCovarianceMatrix方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: enumerate
# 需要导入模块: import PALutils [as 别名]
# 或者: from PALutils import createWhiteNoiseCovarianceMatrix [as 别名]
print "Maximum likelihood from f-stat search = {0}\n".format(fmaxlike)
# get determinant of covariance matrix for use in likelihood
logdetTerm = []
for ct, p in enumerate(psr):
efac = p.efac
equad = p.equad
Amp = p.Amp
gam = p.gam
fH = p.fH
print p.name, efac, equad, Amp, gam
# get white noise covariance matrix
white = PALutils.createWhiteNoiseCovarianceMatrix(p.err, efac, equad)
# get red noise covariance matrix
tm = PALutils.createTimeLags(p.toas, p.toas, round=True)
red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)
C = np.dot(p.G.T, np.dot(red + white, p.G))
cf = sl.cho_factor(C)
logdetTerm.append(np.sum(2 * np.log(np.diag(cf[0])))) # + p.G.shape[1]*np.log(2*np.pi))
# get null model log-likelihood
nullLike = 0
for ct, p in enumerate(psr):
nullLike += -0.5 * logdetTerm[ct]
nullLike += -0.5 * np.dot(p.res, np.dot(p.invCov, p.res))
示例2: firstOrderLikelihood
# 需要导入模块: import PALutils [as 别名]
# 或者: from PALutils import createWhiteNoiseCovarianceMatrix [as 别名]
def firstOrderLikelihood(psr, ORF, Agw, gamgw, Ared, gred, efac, equad, \
                         interpolate=False):
    """
    Compute the value of the first-order likelihood as defined in
    Ellis, Siemens, van Haasteren (2013).

    @param psr: List of pulsar object instances
    @param ORF: Vector of pairwise overlap reduction values
    @param Agw: Amplitude of GWB in standard strain amplitude units
    @param gamgw: Power spectral index of GWB
    @param Ared: Vector of amplitudes of intrinsic red noise in GWB strain units
    @param gred: Vector of power spectral indices of intrinsic red noise
    @param efac: Vector of efacs
    @param equad: Vector of equads
    @param interpolate: Boolean to perform interpolation only with compressed
                        data. (default = False)

    @return: Log-likelihood value
    """
    npsr = len(psr)
    loglike = 0
    tmp = []

    # start loop to evaluate auto-terms
    for ll in range(npsr):
        # timing-model-projected residuals for pulsar ll
        r1 = np.dot(psr[ll].G.T, psr[ll].res)

        # create time lags between this pulsar's TOAs
        tm = PALutils.createTimeLags(psr[ll].toas, psr[ll].toas)

        #TODO: finish option to do interpolation when using compression

        # calculate auto GW covariance matrix
        SC = PALutils.createRedNoiseCovarianceMatrix(tm, Agw, gamgw)

        # calculate auto (intrinsic) red noise covariance matrix
        SA = PALutils.createRedNoiseCovarianceMatrix(tm, Ared[ll], gred[ll])

        # create white noise covariance matrix
        #TODO: add ability to use multiple efacs for different backends
        white = PALutils.createWhiteNoiseCovarianceMatrix(psr[ll].err, efac[ll], equad[ll])

        # total auto-covariance matrix
        P = SC + SA + white

        # sandwich with G matrices (marginalize over timing-model parameters)
        Ppost = np.dot(psr[ll].G.T, np.dot(P, psr[ll].G))

        # Cholesky factorization of the projected covariance
        cf = sl.cho_factor(Ppost)

        # solution vector P^-1 r
        rr = sl.cho_solve(cf, r1)

        # temporarily store G P^-1 r for the cross terms below
        tmp.append(np.dot(psr[ll].G, rr))

        # auto-term contribution: -1/2 [log det(2 pi P) + r^T P^-1 r],
        # with log det obtained from the diagonal of the Cholesky factor
        loglike += -0.5 * (np.sum(np.log(2*np.pi*np.diag(cf[0])**2)) + np.dot(r1, rr))

    # now compute cross terms over unique pulsar pairs
    k = 0
    for ll in range(npsr):
        for kk in range(ll+1, npsr):
            # create time lags between the two pulsars' TOAs
            tm = PALutils.createTimeLags(psr[ll].toas, psr[kk].toas)

            # cross covariance matrix at unit amplitude (scaled by Agw**2 below)
            SIJ = PALutils.createRedNoiseCovarianceMatrix(tm, 1, gamgw)

            # carry out matrix-vector operations
            tmp1 = np.dot(SIJ, tmp[kk])

            # add to likelihood, weighted by the overlap reduction value
            loglike += ORF[k]/2 * Agw**2 * np.dot(tmp[ll], tmp1)

            # increment ORF counter
            k += 1

    return loglike
示例3: addInverseCovFromNoiseFile
# 需要导入模块: import PALutils [as 别名]
# 或者: from PALutils import createWhiteNoiseCovarianceMatrix [as 别名]
#.........(part of this function's code was omitted by the extractor).........
# NOTE(review): this is the flattened interior of addInverseCovFromNoiseFile;
# indentation was lost when the snippet was scraped, so block structure below
# must be inferred from the comments.
# first create G matrix inputs from the tempo2 object: design matrix,
# TOAs converted from days to seconds, and TOA errors from us to seconds
designmatrix = np.double(t2pulsar.designmatrix())
toas = np.double(t2pulsar.toas()*86400)
errs = np.double(t2pulsar.toaerrs*1e-6)
# if doing daily averaging
if dailyAverage:
# get average quantities
toas, qmatrix, errs, dmatrix, freqs, bands = PALutils.dailyAverage(t2pulsar)
# construct new daily averaged residuals and designmatrix
toas *= 86400
designmatrix = np.dot(qmatrix, dmatrix)
G = PALutils.createGmatrix(designmatrix)
# create matrix of pairwise time lags between TOAs
tm = PALutils.createTimeLags(toas, toas, round=True)
# now read noise file to get model and parameters
# NOTE(review): `file` shadows the builtin and is never closed — candidate fix
file = open(noisefile,'r')
fH = None
tau = None
DMAmp = None
DMgam = None
for line in file.readlines():
# each line is a "key value" pair; defaults above cover models other than pure PL
key = line.split()[0]
# get amplitude
if "Amp" == key:
Amp = float(line.split()[-1])
# get spectral index
elif "gam" == key:
gam = float(line.split()[-1])
# get efac
elif "efac" == key:
efac = float(line.split()[-1])
# get equad
elif "equad" == key:
equad = float(line.split()[-1])
# get high frequency cutoff if available
elif "fH" == key:
fH = float(line.split()[-1])
# get correlation time scale if available
elif "tau" == key:
tau = float(line.split()[-1])
# get DM Amplitude if available
elif "DMAmp" == key:
DMAmp = float(line.split()[-1])
# get DM Spectral Index if available
elif "DMgam" == key:
DMgam = float(line.split()[-1])
# construct red and white noise covariance matrices
# NOTE(review): Amp/gam/efac/equad are only bound if present in the noise
# file; a missing key would raise NameError here — TODO confirm inputs
red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)
white = PALutils.createWhiteNoiseCovarianceMatrix(errs, efac, equad, tau=tau)
# construct post timing model marginalization covariance matrix
cov = red + white
pcov = np.dot(G.T, np.dot(cov, G))
# finally construct "inverse": G (G^T C G)^-1 G^T
invCov = np.dot(G, np.dot(np.linalg.inv(pcov), G.T))
# create dataset for inverse covariance matrix
pulsarsgroup.create_dataset('invCov', data = invCov)
# create dataset for G matrix
pulsarsgroup.create_dataset('Gmatrix', data = G)
# record noise parameter values
pulsarsgroup.create_dataset('Amp', data = Amp)
pulsarsgroup.create_dataset('gam', data = gam)
pulsarsgroup.create_dataset('efac', data = efac)
pulsarsgroup.create_dataset('equad', data = equad)
# optional parameters are stored only when present in the noise file
if fH is not None:
pulsarsgroup.create_dataset('fH', data = fH)
if tau is not None:
pulsarsgroup.create_dataset('tau', data = tau)
if DMAmp is not None:
pulsarsgroup.create_dataset('DMAmp', data = DMAmp)
if DMgam is not None:
pulsarsgroup.create_dataset('DMgam', data = DMgam)
# Close the hdf5 file
self.h5file.close()
示例4:
# 需要导入模块: import PALutils [as 别名]
# 或者: from PALutils import createWhiteNoiseCovarianceMatrix [as 别名]
# NOTE(review): flattened script fragment, truncated by the extractor — the
# final `try`'s except clause and the residual-injection code are not shown.
# read the high-frequency cutoff for this pulsar if stored; older files
# may lack it, hence the KeyError fallback to None
try:
fH = pfile['Data']['Pulsars'][p.name]['fH'].value
except KeyError:
fH = None
# get noise-model values from hdf5 file
try:
Amp = pfile['Data']['Pulsars'][p.name]['Amp'].value
gam = pfile['Data']['Pulsars'][p.name]['gam'].value
efac = pfile['Data']['Pulsars'][p.name]['efac'].value
equad = pfile['Data']['Pulsars'][p.name]['equad'].value
# build total covariance (red + white) from the stored noise parameters
tm = PALutils.createTimeLags(p.toas, p.toas)
red = PALutils.createRedNoiseCovarianceMatrix(tm, Amp, gam, fH=fH)
white = PALutils.createWhiteNoiseCovarianceMatrix(p.err, efac, equad)
cov = red + white
# cholesky decomp, used to color the white draw below (L w has covariance cov)
L = np.linalg.cholesky(cov)
# set random number seed, offset per pulsar index so draws differ
if args.seed:
print 'Using fixed random number seed!'
np.random.seed(seed=args.seed*(ct+1))
# zero mean unit variance draw, one sample per TOA
w = np.random.randn(p.ntoa)
# get induced residuals