Python pymc3.sample Method Code Examples

This article collects typical usage examples of the pymc3.sample method in Python. If you are wondering how to use pymc3.sample, how to call it in practice, or what working examples look like, the curated code examples below may help. You can also explore further usage examples from the pymc3 package itself.


Below are 15 code examples of the pymc3.sample method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
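Before the project-specific examples, here is a minimal sketch of the typical pm.sample call pattern. The model (estimating the mean of simulated data) and all names in it are invented purely for illustration and do not come from any of the projects listed below.

import numpy as np
import pymc3 as pm

# Simulated data, for illustration only
data = np.random.randn(100)

with pm.Model():
    # Prior on the unknown mean
    mu = pm.Normal("mu", mu=0, sd=10)
    # Likelihood of the observed data
    pm.Normal("obs", mu=mu, sd=1, observed=data)
    # Draw posterior samples; pm.sample returns a MultiTrace
    trace = pm.sample(1000, tune=1000)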

Example 1: model

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def model(profiles, comparisons, selections, sample=2500, alpha_prior_std=10):
    all_attributes = pd.get_dummies(profiles).columns
    profiles_dummies = pd.get_dummies(profiles, drop_first=True)
    choices = pd.concat({profile: profiles_dummies.loc[comparisons[profile]].reset_index(drop=True) for profile in comparisons.columns}, axis=1)

    respondants = selections.columns
    n_attributes_in_model = profiles_dummies.shape[1]
    n_participants = selections.shape[1]

    with pm.Model():

        # https://www.sawtoothsoftware.com/download/ssiweb/CBCHB_Manual.pdf
        # need to include the covariance matrix as a parent of `partsworth`
        alpha = pm.Normal('alpha', 0, sd=alpha_prior_std, shape=n_attributes_in_model, testval=np.random.randn(n_attributes_in_model))
        partsworth = pm.MvNormal("partsworth", alpha, tau=np.eye(n_attributes_in_model), shape=(n_participants, n_attributes_in_model))

        cs = [_create_observation_variable(selection, choices, partsworth[i, :]) for i, (_, selection) in enumerate(selections.iteritems())]

        trace = pm.sample(sample)
    return transform_trace_to_individual_summary_statistics(trace, respondants, profiles_dummies.columns, all_attributes) 
Developer: CamDavidsonPilon, Project: lifestyles, Lines of code: 22, Source: cbc_hb.py

Example 2: fit

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def fit(self, X, Y, n_samples=10000, tune_steps=1000, n_jobs=4):
        with pm.Model() as self.model:
            # Priors
            std = pm.Uniform("std", 0, self.sps, testval=X.std())
            beta = pm.StudentT("beta", mu=0, lam=self.sps, nu=self.nu)
            alpha = pm.StudentT("alpha", mu=0, lam=self.sps, nu=self.nu, testval=Y.mean())
            # Deterministic model
            mean = pm.Deterministic("mean", alpha + beta * X)
            # Posterior distribution
            obs = pm.Normal("obs", mu=mean, sd=std, observed=Y)
            ## Run MCMC
            # Find search start value with maximum a posterior estimation
            start = pm.find_MAP()
            # sample posterior distribution for latent variables
            trace = pm.sample(n_samples, njobs=n_jobs, tune=tune_steps, start=start)
            # Recover posterior samples
            self.burned_trace = trace[int(n_samples / 2):] 
Developer: naripok, Project: cryptotrader, Lines of code: 19, Source: bayesian.py

Example 3: __init__

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def __init__(self, **kwargs):
        """Create an instance of the BayesianLeastSquaresImputer class.

        The class requires multiple arguments necessary to create priors for
        a Bayesian linear regression equation. The regression is:
        alpha + beta * X + epsilon. Because parameters are treated as
        random variables, we must specify their distributions, including
        the parameters of those distributions. In this init method we also
        include arguments used to sample the posterior distributions.

        Args:
            **kwargs: default keyword arguments used for Bayesian analysis.
                Note - kwargs popped for default arguments defined below.
                Rest of kwargs passed as params to sampling (see pymc3).
            am (float, Optional): mean of alpha prior. Default 0.
            asd (float, Optional): std. deviation of alpha prior. Default 10.
            bm (float, Optional): mean of beta priors. Default 0.
            bsd (float, Optional): std. deviation of beta priors. Default 10.
            sig (float, Optional): parameter of sigma prior. Default 1.
            sample (int, Optional): number of posterior samples per chain.
                Default = 1000. More samples, longer to run, but better
                approximation of the posterior & chance of convergence.
            tune (int, Optional): parameter for tuning. Draws done in addition
                to sample. Default = 1000.
            init (str, Optional): MCMC algo to use for posterior sampling.
                Default = 'auto'. See pymc3 docs for more info on choices.
            fill_value (str, Optional): How to draw from the posterior to
                create imputations. Default is None. 'random' and 'mean'
                supported for explicit options.
        """
        self.am = kwargs.pop("am", 0)
        self.asd = kwargs.pop("asd", 10)
        self.bm = kwargs.pop("bm", 0)
        self.bsd = kwargs.pop("bsd", 10)
        self.sig = kwargs.pop("sig", 1)
        self.sample = kwargs.pop("sample", 1000)
        self.tune = kwargs.pop("tune", 1000)
        self.init = kwargs.pop("init", "auto")
        self.fill_value = kwargs.pop("fill_value", None)
        self.sample_kwargs = kwargs 
Developer: kearnz, Project: autoimpute, Lines of code: 42, Source: bayesian_regression.py
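Note that the __init__ above only stores prior hyperparameters and sampling settings; the model construction itself is not part of this excerpt. As a rough, hypothetical sketch (not the actual autoimpute implementation) of how such stored values could feed a pymc3 linear-regression model and pm.sample:

import pymc3 as pm

def _fit_sketch(imputer, X, y):
    # Hypothetical helper, for illustration only
    with pm.Model() as model:
        alpha = pm.Normal("alpha", mu=imputer.am, sd=imputer.asd)
        beta = pm.Normal("beta", mu=imputer.bm, sd=imputer.bsd, shape=X.shape[1])
        sigma = pm.HalfCauchy("sigma", beta=imputer.sig)
        mu = alpha + pm.math.dot(X, beta)
        pm.Normal("obs", mu=mu, sd=sigma, observed=y)
        trace = pm.sample(imputer.sample, tune=imputer.tune,
                          init=imputer.init, **imputer.sample_kwargs)
    return model, trace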

Example 4: main

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def main():

    #load data    
    returns = data.get_data_google('SPY', start='2008-5-1', end='2009-12-1')['Close'].pct_change()
    returns.plot()
    plt.ylabel('daily returns in %');
    
    with pm.Model() as sp500_model:
        
        nu = pm.Exponential('nu', 1./10, testval=5.0)
        sigma = pm.Exponential('sigma', 1./0.02, testval=0.1)
        
        s = pm.GaussianRandomWalk('s', sigma**-2, shape=len(returns))                
        r = pm.StudentT('r', nu, lam=pm.math.exp(-2*s), observed=returns)
        
    
    with sp500_model:
        trace = pm.sample(2000)

    pm.traceplot(trace, [nu, sigma]);
    plt.show()
    
    plt.figure()
    returns.plot()
    plt.plot(returns.index, np.exp(trace['s',::5].T), 'r', alpha=.03)
    plt.legend(['S&P500', 'stochastic volatility process'])
    plt.show() 
Developer: vsmolyakov, Project: fin, Lines of code: 29, Source: stochastic_volatility.py

Example 5: show_posteriors

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def show_posteriors(self, kde=True):
        if hasattr(self, 'burned_trace'):
            pm.plots.traceplot(trace=self.burned_trace, varnames=["std", "beta", "alpha"])
            pm.plot_posterior(trace=self.burned_trace, varnames=["std", "beta", "alpha"], kde_plot=kde)
            pm.plots.autocorrplot(trace=self.burned_trace, varnames=["std", "beta", "alpha"])
        else:
            print("You must sample from the posteriors first.") 
Developer: naripok, Project: cryptotrader, Lines of code: 9, Source: bayesian.py

Example 6: fit

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def fit(self, X, y):
        """
        Fits a Gaussian Process regressor using MCMC.

        Parameters
        ----------
        X: np.ndarray, shape=(nsamples, nfeatures)
            Training instances to fit the GP.
        y: np.ndarray, shape=(nsamples,)
            Corresponding continuous target values to `X`.

        """
        self.X = X
        self.n = self.X.shape[0]
        self.y = y
        self.model = pm.Model()

        with self.model as model:
            l = pm.Uniform('l', 0, 10)

            log_s2_f = pm.Uniform('log_s2_f', lower=-7, upper=5)
            s2_f = pm.Deterministic('sigmaf', tt.exp(log_s2_f))

            log_s2_n = pm.Uniform('log_s2_n', lower=-7, upper=5)
            s2_n = pm.Deterministic('sigman', tt.exp(log_s2_n))

            f_cov = s2_f * covariance_equivalence[type(self.covfunc).__name__](1, l)
            Sigma = f_cov(self.X) + tt.eye(self.n) * s2_n ** 2
            y_obs = pm.MvNormal('y_obs', mu=np.zeros(self.n), cov=Sigma, observed=self.y)
        with self.model as model:
            if self.step is not None:
                self.trace = pm.sample(self.niter, step=self.step())[self.burnin:]
            else:
                self.trace = pm.sample(self.niter, init=self.init)[self.burnin:] 
Developer: josejimenezluna, Project: pyGPGO, Lines of code: 36, Source: GaussianProcessMCMC.py

Example 7: predict

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def predict(self, Xstar, return_std=False, nsamples=10):
        """
        Returns mean and covariances for each posterior sampled Gaussian Process.

        Parameters
        ----------
        Xstar: np.ndarray, shape=((nsamples, nfeatures))
            Testing instances to predict.
        return_std: bool
            Whether to return the standard deviation of the posterior process. Otherwise,
            it returns the whole covariance matrix of the posterior process.
        nsamples:
            Number of posterior MCMC samples to consider.

        Returns
        -------
        np.ndarray
            Mean of the posterior process for each MCMC sample and `Xstar`.
        np.ndarray
            Covariance posterior process for each MCMC sample and `Xstar`.
        """
        chunk = list(self.trace)
        chunk = chunk[::-1][:nsamples]
        post_mean = []
        post_var = []
        for posterior_sample in chunk:
            params = self._extractParam(posterior_sample, self.covfunc.parameters)
            covfunc = self.covfunc.__class__(**params)
            gp = GaussianProcess(covfunc)
            gp.fit(self.X, self.y)
            m, s = gp.predict(Xstar, return_std=return_std)
            post_mean.append(m)
            post_var.append(s)
        return np.array(post_mean), np.array(post_var) 
Developer: josejimenezluna, Project: pyGPGO, Lines of code: 36, Source: GaussianProcessMCMC.py

Example 8: fit

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def fit(self, X, y):
        """
        Fits a Student-t regressor using MCMC.

        Parameters
        ----------
        X: np.ndarray, shape=(nsamples, nfeatures)
            Training instances to fit the GP.
        y: np.ndarray, shape=(nsamples,)
            Corresponding continuous target values to `X`.

        """
        self.X = X
        self.n = self.X.shape[0]
        self.y = y
        self.model = pm.Model()

        with self.model as model:
            l = pm.Uniform('l', 0, 10)

            log_s2_f = pm.Uniform('log_s2_f', lower=-7, upper=5)
            s2_f = pm.Deterministic('sigmaf', tt.exp(log_s2_f))

            log_s2_n = pm.Uniform('log_s2_n', lower=-7, upper=5)
            s2_n = pm.Deterministic('sigman', tt.exp(log_s2_n))

            f_cov = s2_f * covariance_equivalence[type(self.covfunc).__name__](1, l)
            Sigma = f_cov(self.X) + tt.eye(self.n) * s2_n ** 2
            y_obs = pm.MvStudentT('y_obs', nu=self.nu, mu=np.zeros(self.n), Sigma=Sigma, observed=self.y)
        with self.model as model:
            if self.step is not None:
                self.trace = pm.sample(self.niter, step=self.step())[self.burnin:]
            else:
                self.trace = pm.sample(self.niter, init=self.init)[self.burnin:] 
Developer: josejimenezluna, Project: pyGPGO, Lines of code: 36, Source: tStudentProcessMCMC.py

Example 9: predict

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def predict(self, Xstar, return_std=False, nsamples=10):
        """
        Returns mean and covariances for each posterior sampled Student-t Process.

        Parameters
        ----------
        Xstar: np.ndarray, shape=((nsamples, nfeatures))
            Testing instances to predict.
        return_std: bool
            Whether to return the standard deviation of the posterior process. Otherwise,
            it returns the whole covariance matrix of the posterior process.
        nsamples:
            Number of posterior MCMC samples to consider.

        Returns
        -------
        np.ndarray
            Mean of the posterior process for each MCMC sample and Xstar.
        np.ndarray
            Covariance posterior process for each MCMC sample and Xstar.
        """
        chunk = list(self.trace)
        chunk = chunk[::-1][:nsamples]
        post_mean = []
        post_var = []
        for posterior_sample in chunk:
            params = self._extractParam(posterior_sample, self.covfunc.parameters)
            covfunc = self.covfunc.__class__(**params)
            gp = tStudentProcess(covfunc, nu=self.nu + self.n)
            gp.fit(self.X, self.y)
            m, s = gp.predict(Xstar, return_std=return_std)
            post_mean.append(m)
            post_var.append(s)
        return np.array(post_mean), np.array(post_var) 
Developer: josejimenezluna, Project: pyGPGO, Lines of code: 36, Source: tStudentProcessMCMC.py

Example 10: sample_prior_predictive

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def sample_prior_predictive(self, X, batch_effects, samples, trace=None):
        """ Function to sample from prior predictive distribution """
        
        if len(X.shape)==1:
            X = np.expand_dims(X, axis=1)
        if len(batch_effects.shape)==1:
            batch_effects = np.expand_dims(batch_effects, axis=1)
            
        self.batch_effects_num = batch_effects.shape[1]
        self.batch_effects_size = []
        for i in range(self.batch_effects_num):
            self.batch_effects_size.append(len(np.unique(batch_effects[:,i])))
                
        y = np.zeros([X.shape[0],1])
        
        if self.model_type == 'linear': 
            with hbr(X, y, batch_effects, self.batch_effects_size, self.configs,
                     trace):
                ppc = pm.sample_prior_predictive(samples=samples) 
        elif self.model_type == 'polynomial':
            X = create_poly_basis(X, self.configs['order'])
            with hbr(X, y, batch_effects, self.batch_effects_size, self.configs,
                     trace):
                ppc = pm.sample_prior_predictive(samples=samples) 
        elif self.model_type == 'bspline': 
            self.bsp = bspline_fit(X, self.configs['order'], self.configs['nknots'])
            X = bspline_transform(X, self.bsp)
            with hbr(X, y, batch_effects, self.batch_effects_size, self.configs,
                     trace):
                ppc = pm.sample_prior_predictive(samples=samples) 
        elif self.model_type == 'nn': 
            with nn_hbr(X, y, batch_effects, self.batch_effects_size, self.configs,
                        trace):
                ppc = pm.sample_prior_predictive(samples=samples) 
        
        return ppc 
Developer: amarquand, Project: nispat, Lines of code: 38, Source: hbr.py

Example 11: _pyro_noncentered_model

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def _pyro_noncentered_model(J, sigma, y=None):
    import pyro
    import pyro.distributions as dist

    mu = pyro.sample("mu", dist.Normal(0, 5))
    tau = pyro.sample("tau", dist.HalfCauchy(5))
    with pyro.plate("J", J):
        eta = pyro.sample("eta", dist.Normal(0, 1))
        theta = mu + tau * eta
        return pyro.sample("obs", dist.Normal(theta, sigma), obs=y) 
Developer: arviz-devs, Project: arviz, Lines of code: 12, Source: helpers.py

Example 12: _numpyro_noncentered_model

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def _numpyro_noncentered_model(J, sigma, y=None):
    import numpyro
    import numpyro.distributions as dist

    mu = numpyro.sample("mu", dist.Normal(0, 5))
    tau = numpyro.sample("tau", dist.HalfCauchy(5))
    with numpyro.plate("J", J):
        eta = numpyro.sample("eta", dist.Normal(0, 1))
        theta = mu + tau * eta
        return numpyro.sample("obs", dist.Normal(theta, sigma), obs=y) 
Developer: arviz-devs, Project: arviz, Lines of code: 12, Source: helpers.py

Example 13: pymc3_noncentered_schools

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def pymc3_noncentered_schools(data, draws, chains):
    """Non-centered eight schools implementation for pymc3."""
    import pymc3 as pm

    with pm.Model() as model:
        mu = pm.Normal("mu", mu=0, sd=5)
        tau = pm.HalfCauchy("tau", beta=5)
        eta = pm.Normal("eta", mu=0, sd=1, shape=data["J"])
        theta = pm.Deterministic("theta", mu + tau * eta)
        pm.Normal("obs", mu=theta, sd=data["sigma"], observed=data["y"])
        trace = pm.sample(draws, chains=chains)
    return model, trace 
Developer: arviz-devs, Project: arviz, Lines of code: 14, Source: helpers.py

Example 14: sample_color

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def sample_color(image, samples=5000, tune=1000, nchains=4):
    """Run MCMC on a color image. EXPERIMENTAL!

    Parameters
    ----------
    image : numpy.ndarray
        Image array from `load_image`. Should have `image.ndim == 3` (an RGB color image).

    samples : int
        Number of samples to draw from the image

    tune : int
        All chains start at the same spot, so it is good to let them wander
        apart a bit before beginning

    Returns
    -------
    pymc3.MultiTrace of samples from the image. Each sample is an (x, y) float
        of indices that were sampled, with three variables named 'red',
        'green', 'blue'.
    """

    with pm.Model():
        pm.DensityDist('red', ImageLikelihood(image[:, :, 0]), shape=2)
        pm.DensityDist('green', ImageLikelihood(image[:, :, 1]), shape=2)
        pm.DensityDist('blue', ImageLikelihood(image[:, :, 2]), shape=2)

        trace = pm.sample(samples, chains=nchains, tune=tune, step=pm.Metropolis())
    return trace 
Developer: ColCarroll, Project: imcmc, Lines of code: 31, Source: imcmc.py

Example 15: sample

# Required module: import pymc3 [as alias]
# Or: from pymc3 import sample [as alias]
def sample(
            self, n_samples: int, beta: float = 1.):
        problem = self.problem
        log_post_fun = TheanoLogProbability(problem, beta)
        trace = self.trace

        x0 = None
        if self.x0 is not None:
            x0 = {x_name: val
                  for x_name, val in zip(self.problem.x_names, self.x0)}

        # create model context
        with pm.Model() as model:
            # uniform bounds
            k = [pm.Uniform(x_name, lower=lb, upper=ub)
                 for x_name, lb, ub in
                 zip(problem.get_reduced_vector(problem.x_names),
                     problem.lb, problem.ub)]

            # convert to tensor vector
            theta = tt.as_tensor_variable(k)

            # use a DensityDist for the log-posterior
            pm.DensityDist('log_post', logp=lambda v: log_post_fun(v),
                           observed={'v': theta})

            # step, by default automatically determined by pymc3
            step = None
            if self.step_function:
                step = self.step_function()

            # perform the actual sampling
            trace = pm.sample(
                draws=int(n_samples), trace=trace, start=x0, step=step,
                **self.options)

            # convert trace to inference data object
            data = az.from_pymc3(trace=trace, model=model)

        self.trace = trace
        self.data = data 
Developer: ICB-DCM, Project: pyPESTO, Lines of code: 43, Source: pymc3.py


Note: The pymc3.sample method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers, and copyright of the source code belongs to the original authors. For distribution and use, please refer to the license of the corresponding project; do not reproduce without permission.