This article collects typical usage examples of the Python method pymc3.find_MAP. If you are unsure how to use pymc3.find_MAP, or are looking for concrete examples of it in practice, the curated code samples below may help. You can also explore further usage examples from the pymc3 module.
Four code examples of pymc3.find_MAP are shown below, ordered by popularity.
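Before the examples, a minimal, self-contained sketch of the typical call pattern: pm.find_MAP() is called inside a model context and returns a dictionary mapping variable names to their maximum a posteriori point estimates. The model and data below are invented purely for illustration.

import numpy as np
import pymc3 as pm

# Invented toy data, only for illustration
data = np.random.normal(loc=1.0, scale=2.0, size=100)

with pm.Model():
    mu = pm.Normal("mu", mu=0, sd=10)
    sigma = pm.HalfNormal("sigma", sd=10)
    pm.Normal("obs", mu=mu, sd=sigma, observed=data)
    # find_MAP returns a dict of variable names to MAP point estimates
    map_estimate = pm.find_MAP()

print(map_estimate)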
Example 1: fit
# Required module: import pymc3 [as alias]
# or: from pymc3 import find_MAP [as alias]
def fit(self, X, Y, n_samples=10000, tune_steps=1000, n_jobs=4):
    with pm.Model() as self.model:
        # Priors
        std = pm.Uniform("std", 0, self.sps, testval=X.std())
        beta = pm.StudentT("beta", mu=0, lam=self.sps, nu=self.nu)
        alpha = pm.StudentT("alpha", mu=0, lam=self.sps, nu=self.nu, testval=Y.mean())
        # Deterministic model
        mean = pm.Deterministic("mean", alpha + beta * X)
        # Likelihood of the observations
        obs = pm.Normal("obs", mu=mean, sd=std, observed=Y)
        ## Run MCMC
        # Find the sampler's start value with maximum a posteriori estimation
        start = pm.find_MAP()
        # Sample the posterior distribution of the latent variables
        trace = pm.sample(n_samples, njobs=n_jobs, tune=tune_steps, start=start)
        # Keep the second half of the trace (discard burn-in)
        self.burned_trace = trace[int(n_samples / 2):]
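The same pattern, a MAP estimate as the sampler's start value followed by MCMC and discarding the first half of the trace as burn-in, can be written as a standalone sketch. The toy data and the value 20 standing in for self.sps and self.nu are assumptions for illustration only.

import numpy as np
import pymc3 as pm

X = np.linspace(0, 10, 200)
Y = 1.0 + 0.5 * X + np.random.normal(scale=1.0, size=X.shape)  # invented data

with pm.Model():
    std = pm.Uniform("std", 0, 20, testval=X.std())
    beta = pm.StudentT("beta", mu=0, lam=20, nu=20)
    alpha = pm.StudentT("alpha", mu=0, lam=20, nu=20, testval=Y.mean())
    mean = pm.Deterministic("mean", alpha + beta * X)
    pm.Normal("obs", mu=mean, sd=std, observed=Y)
    start = pm.find_MAP()                    # MAP estimate as the start value
    trace = pm.sample(4000, tune=1000, start=start)

burned_trace = trace[2000:]                  # discard the first half as burn-in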
Example 2: _laplace
# Required module: import pymc3 [as alias]
# or: from pymc3 import find_MAP [as alias]
def _laplace(model):
    """Fit a model using a Laplace approximation.

    Mainly for pedagogical use. ``mcmc`` and ``advi`` are better approximations.

    Parameters
    ----------
    model: PyMC3 model

    Returns
    -------
    Dictionary, the keys are the names of the variables and the values are tuples of
    modes and standard deviations.
    """
    with model:
        # Keep only the untransformed free variables
        varis = [v for v in model.unobserved_RVs if not pm.util.is_transformed_name(v.name)]
        maps = pm.find_MAP(start=model.test_point, vars=varis)
        hessian = pm.find_hessian(maps, vars=varis)
        if np.linalg.det(hessian) == 0:
            raise np.linalg.LinAlgError("Singular matrix. Use mcmc or advi method")
        # Standard deviations from the diagonal of the inverse Hessian at the MAP
        stds = np.diag(np.linalg.inv(hessian) ** 0.5)
        maps = [v for (k, v) in maps.items() if not pm.util.is_transformed_name(k)]
        modes = [v.item() if v.size == 1 else v for v in maps]
        names = [v.name for v in varis]
        shapes = [np.atleast_1d(mode).shape for mode in modes]
        stds_reshaped = []
        idx0 = 0
        for shape in shapes:
            idx1 = idx0 + sum(shape)
            stds_reshaped.append(np.reshape(stds[idx0:idx1], shape))
            idx0 = idx1
    return dict(zip(names, zip(modes, stds_reshaped)))
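A minimal sketch of how _laplace might be applied: pass it a compiled PyMC3 model and it returns a dictionary of (mode, standard deviation) tuples per untransformed variable. The toy model and data below are assumptions for illustration.

import numpy as np
import pymc3 as pm

np.random.seed(0)
data = np.random.normal(loc=0.5, scale=1.0, size=50)  # invented data

with pm.Model() as toy_model:
    mu = pm.Normal("mu", mu=0, sd=1)
    pm.Normal("y", mu=mu, sd=1, observed=data)

approx = _laplace(toy_model)
print(approx)  # e.g. {'mu': (mode, standard deviation)}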
Example 3: fit_pymc3_model
# Required module: import pymc3 [as alias]
# or: from pymc3 import find_MAP [as alias]
def fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs):
    callbacks = vi_params.get("callbacks", [])
    for i, c in enumerate(callbacks):
        if isinstance(c, CheckParametersConvergence):
            # Rebuild the callback with an absolute (rather than relative) tolerance
            params = c.__dict__
            params.pop("_diff")
            params.pop("prev")
            params.pop("ord")
            params["diff"] = "absolute"
            callbacks[i] = CheckParametersConvergence(**params)
    if sampler == "variational":
        with self.model:
            try:
                # Short warm-up run to obtain a starting point for ADVI
                self.trace = pm.sample(chains=2, cores=8, tune=5, draws=5)
                vi_params["start"] = self.trace[-1]
                self.trace_vi = pm.fit(**vi_params)
                self.trace = self.trace_vi.sample(draws=draws)
            except Exception as e:
                if hasattr(e, "message"):
                    message = e.message
                else:
                    message = e
                self.logger.error(message)
                self.trace_vi = None
        if self.trace_vi is None and self.trace is None:
            with self.model:
                self.logger.info(
                    "Variational (ADVI) inference failed for draws {}; "
                    "falling back to MCMC sampling".format(draws)
                )
                self.trace = pm.sample(
                    chains=1, cores=4, tune=20, draws=20, step=pm.NUTS()
                )
    elif sampler == "metropolis":
        with self.model:
            # Use the MAP estimate as the starting point for the Metropolis sampler
            start = pm.find_MAP()
            self.trace = pm.sample(
                chains=2,
                cores=8,
                tune=tune,
                draws=draws,
                **kwargs,
                step=pm.Metropolis(),
                start=start,
            )
    else:
        with self.model:
            self.trace = pm.sample(
                chains=2, cores=8, tune=tune, draws=draws, **kwargs, step=pm.NUTS()
            )
Example 4: posterior_mcmc
# Required module: import pymc3 [as alias]
# or: from pymc3 import find_MAP [as alias]
def posterior_mcmc(self, data):
    """
    Find the posterior distribution numerically via MCMC sampling.
    """
    with pm.Model() as ab_model:
        # Priors
        mua = pm.distributions.continuous.Beta('muA', alpha=self.alpha_prior, beta=self.beta_prior)
        mub = pm.distributions.continuous.Beta('muB', alpha=self.alpha_prior, beta=self.beta_prior)
        # Likelihoods
        pm.Bernoulli('likelihoodA', mua, observed=data[0])
        pm.Bernoulli('likelihoodB', mub, observed=data[1])
        # Distribution of the difference (lift)
        pm.Deterministic('lift', mub - mua)
        # Distribution of the effect size
        sigma_a = pm.Deterministic('sigmaA', np.sqrt(mua * (1 - mua)))
        sigma_b = pm.Deterministic('sigmaB', np.sqrt(mub * (1 - mub)))
        pm.Deterministic('effect_size', (mub - mua) / (np.sqrt(0.5 * (sigma_a ** 2 + sigma_b ** 2))))
        # Use the MAP estimate as the starting point for the Slice sampler
        start = pm.find_MAP()
        step = pm.Slice()
        trace = pm.sample(self.iterations, step=step, start=start)

    # Discard the first 500 samples as burn-in and histogram the marginal posteriors
    bins = np.linspace(0, 1, self.resolution)
    mua = np.histogram(trace['muA'][500:], bins=bins, normed=True)
    mub = np.histogram(trace['muB'][500:], bins=bins, normed=True)
    sigma_a = np.histogram(trace['sigmaA'][500:], bins=bins, normed=True)
    sigma_b = np.histogram(trace['sigmaB'][500:], bins=bins, normed=True)
    rvs = trace['lift'][500:]
    bins = np.linspace(np.min(rvs) - 0.2 * abs(np.min(rvs)), np.max(rvs) + 0.2 * abs(np.max(rvs)), self.resolution)
    lift = np.histogram(rvs, bins=bins, normed=True)
    rvs = trace['effect_size'][500:]
    bins = np.linspace(np.min(rvs) - 0.2 * abs(np.min(rvs)), np.max(rvs) + 0.2 * abs(np.max(rvs)), self.resolution)
    pes = np.histogram(rvs, bins=bins, normed=True)
    posterior = {'muA': mua, 'muB': mub, 'sigmaA': sigma_a, 'sigmaB': sigma_b,
                 'lift': lift, 'es': pes, 'prior': self.prior()}
    return posterior
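The core of this A/B model can be reproduced standalone. This is a minimal sketch with invented conversion data and flat Beta(1, 1) priors instead of the class's alpha_prior/beta_prior attributes:

import numpy as np
import pymc3 as pm

data_a = np.random.binomial(1, 0.05, size=1000)  # invented conversions, variant A
data_b = np.random.binomial(1, 0.06, size=1000)  # invented conversions, variant B

with pm.Model():
    mua = pm.Beta("muA", alpha=1, beta=1)
    mub = pm.Beta("muB", alpha=1, beta=1)
    pm.Bernoulli("likelihoodA", mua, observed=data_a)
    pm.Bernoulli("likelihoodB", mub, observed=data_b)
    pm.Deterministic("lift", mub - mua)
    start = pm.find_MAP()                # MAP estimate as the Slice sampler's start value
    trace = pm.sample(2000, step=pm.Slice(), start=start)

print(trace["lift"].mean())              # posterior mean of the lift B - A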