

Python constraints.positive method: code examples

This article collects typical usage examples of the Python method torch.distributions.constraints.positive. If you are wondering what constraints.positive does, how it is used, or where to find working examples, the curated snippets below should help. You can also browse further examples from its parent module, torch.distributions.constraints.


The following presents 6 code examples of the constraints.positive method, sorted by popularity by default. Each snippet is an excerpt from a larger file, so any names other than constraints that appear in a snippet are assumed to be imported in the source file listed after that example.
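Before the examples, a minimal orientation sketch (not taken from any of the projects below, and assuming only a standard PyTorch install): constraints.positive is a constraint object whose check method tests membership elementwise, and torch.distributions.transform_to maps unconstrained real values onto the positive half-line; Pyro relies on this registry to keep a pyro.param declared with constraint=constraints.positive positive throughout optimization.

import torch
from torch.distributions import constraints, transform_to

# Elementwise membership test: True exactly where the value is > 0.
print(constraints.positive.check(torch.tensor([1.0, -2.0, 0.5])))

# transform_to(constraints.positive) returns a bijection from unconstrained
# reals onto (0, inf); optimizers work in the unconstrained space and the
# transform recovers a positive value.
t = transform_to(constraints.positive)
unconstrained = torch.tensor([-1.0, 0.0, 2.0])
positive_values = t(unconstrained)
assert constraints.positive.check(positive_values).all()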

Example 1: test_constraints

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def test_constraints(backend, jit):
    data = torch.tensor(0.5)

    def model():
        locs = pyro.param("locs", torch.randn(3), constraint=constraints.real)
        scales = pyro.param("scales", torch.randn(3).exp(), constraint=constraints.positive)
        p = torch.tensor([0.5, 0.3, 0.2])
        x = pyro.sample("x", dist.Categorical(p))
        pyro.sample("obs", dist.Normal(locs[x], scales[x]), obs=data)

    def guide():
        q = pyro.param("q", torch.randn(3).exp(), constraint=constraints.simplex)
        pyro.sample("x", dist.Categorical(q))

    with pyro_backend(backend):
        Elbo = infer.JitTrace_ELBO if jit else infer.Trace_ELBO
        elbo = Elbo(ignore_jit_warnings=True)
        assert_ok(model, guide, elbo) 
Author: pyro-ppl, Project: funsor, Lines of code: 20, Source file: test_minipyro.py

Example 2: __init__

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def __init__(self, n, eta, validate_args=False):
        TModule.__init__(self)
        if not isinstance(n, int) or n < 1:
            raise ValueError("n must be a positive integer")
        if isinstance(eta, Number):
            eta = torch.tensor(float(eta))
        self.n = torch.tensor(n, dtype=torch.long, device=eta.device)
        batch_shape = eta.shape
        event_shape = torch.Size([n, n])
        # Normalization constant(s)
        i = torch.arange(n, dtype=eta.dtype, device=eta.device)
        C = (((2 * eta.view(-1, 1) - 2 + i) * i).sum(1) * math.log(2)).view_as(eta)
        C += n * torch.sum(2 * torch.lgamma(i / 2 + 1) - torch.lgamma(i + 2))
        # need to assign values before registering as buffers to make argument validation work
        self.eta = eta
        self.C = C
        super(LKJPrior, self).__init__(batch_shape, event_shape, validate_args=validate_args)
        # now need to delete to be able to register buffer
        del self.eta, self.C
        self.register_buffer("eta", eta)
        self.register_buffer("C", C) 
Author: cornellius-gp, Project: gpytorch, Lines of code: 23, Source file: lkj_prior.py

Example 3: _is_valid_correlation_matrix

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def _is_valid_correlation_matrix(Sigma, tol=1e-6):
    """Check if supplied matrix is a valid correlation matrix

    A matrix is a valid correlation matrix if it is positive semidefinite, and
    if all diagonal elements are equal to 1.

    Args:
        Sigma: An n x n correlation matrix, or a batch of b correlation matrices
            with shape b x n x n
        tol: The tolerance with which to check unit value of the diagonal elements

    Returns:
        True if Sigma is a valid correlation matrix, False otherwise (in batch
            mode, all matrices in the batch need to be valid correlation matrices)

    """
    pdef = torch.all(constraints.positive_definite.check(Sigma))
    return pdef and all(torch.all(torch.abs(S.diag() - 1) < tol) for S in Sigma.view(-1, *Sigma.shape[-2:])) 
Author: cornellius-gp, Project: gpytorch, Lines of code: 20, Source file: lkj_prior.py
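A quick usage sketch for the helper above (hedged: it simply calls the function defined in Example 3, with torch and constraints imported as noted in the snippet header): the 3 x 3 identity is a valid correlation matrix, while a scaled identity violates the unit-diagonal requirement.

eye = torch.eye(3)                    # positive definite, diagonal of ones
print(_is_valid_correlation_matrix(eye))        # evaluates truthy: valid

scaled = 2.0 * torch.eye(3)           # still positive definite, but diagonal entries are 2
print(_is_valid_correlation_matrix(scaled))     # evaluates falsy: invalid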

Example 4: _is_valid_correlation_matrix_cholesky_factor

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def _is_valid_correlation_matrix_cholesky_factor(L, tol=1e-6):
    """Check if supplied matrix is a Cholesky factor of a valid correlation matrix

    A matrix is a Cholesky factor of a valid correlation matrix if it is lower
    triangular, has a positive diagonal, and has rows of unit Euclidean norm.

    Args:
        L: An n x n lower-triangular matrix, or a batch of b lower-triangular
            matrices with shape b x n x n
        tol: The tolerance with which to check positivity of the diagonal and
            the unit norm of the rows

    Returns:
        True if L is a Cholesky factor of a valid correlation matrix, False
            otherwise (in batch mode, all matrices in the batch need to be
            Cholesky factors of valid correlation matrices)

    """
    unit_row_length = torch.all((torch.norm(L, dim=-1) - 1).abs() < tol)
    return unit_row_length and torch.all(constraints.lower_cholesky.check(L)) 
Author: cornellius-gp, Project: gpytorch, Lines of code: 22, Source file: lkj_prior.py
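The analogous sanity check for the Cholesky-factor variant (again just a sketch, reusing the function defined in Example 4): the identity matrix is its own Cholesky factor and passes, while a scaled identity fails the unit row-norm test.

L_ok = torch.eye(3)                   # lower triangular, positive diagonal, unit-norm rows
print(_is_valid_correlation_matrix_cholesky_factor(L_ok))      # evaluates truthy: valid

L_bad = 0.5 * torch.eye(3)            # rows no longer have unit norm
print(_is_valid_correlation_matrix_cholesky_factor(L_bad))     # evaluates falsy: invalid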

Example 5: initialize_params

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def initialize_params(self):

        # dictionary of guide random effect parameters
        params = {
            "eps_g": {},
            "eps_i": {},
        }

        N_state = self.config["sizes"]["state"]

        # initialize group-level parameters
        if self.config["group"]["random"] == "continuous":

            params["eps_g"]["loc"] = Tensor(
                pyro.param("loc_group",
                           lambda: torch.zeros((N_state, N_state))),
                OrderedDict([("y_prev", bint(N_state))]),
            )

            params["eps_g"]["scale"] = Tensor(
                pyro.param("scale_group",
                           lambda: torch.ones((N_state, N_state)),
                           constraint=constraints.positive),
                OrderedDict([("y_prev", bint(N_state))]),
            )

        # initialize individual-level random effect parameters
        N_c = self.config["sizes"]["group"]
        if self.config["individual"]["random"] == "continuous":

            params["eps_i"]["loc"] = Tensor(
                pyro.param("loc_individual",
                           lambda: torch.zeros((N_c, N_state, N_state))),
                OrderedDict([("g", bint(N_c)), ("y_prev", bint(N_state))]),
            )

            params["eps_i"]["scale"] = Tensor(
                pyro.param("scale_individual",
                           lambda: torch.ones((N_c, N_state, N_state)),
                           constraint=constraints.positive),
                OrderedDict([("g", bint(N_c)), ("y_prev", bint(N_state))]),
            )

        self.params = params
        return self.params 
Author: pyro-ppl, Project: funsor, Lines of code: 47, Source file: model.py

Example 6: main

# Required import: from torch.distributions import constraints
# Alternatively: from torch.distributions.constraints import positive
def main(args):
    # Define a basic model with a single Normal latent random variable `loc`
    # and a batch of Normally distributed observations.
    def model(data):
        loc = pyro.sample("loc", dist.Normal(0., 1.))
        with pyro.plate("data", len(data), dim=-1):
            pyro.sample("obs", dist.Normal(loc, 1.), obs=data)

    # Define a guide (i.e. variational distribution) with a Normal
    # distribution over the latent random variable `loc`.
    def guide(data):
        guide_loc = pyro.param("guide_loc", torch.tensor(0.))
        guide_scale = pyro.param("guide_scale", torch.tensor(1.),
                                 constraint=constraints.positive)
        pyro.sample("loc", dist.Normal(guide_loc, guide_scale))

    # Generate some data.
    torch.manual_seed(0)
    data = torch.randn(100) + 3.0

    # Because the API in minipyro matches that of Pyro proper,
    # training code works with generic Pyro implementations.
    with pyro_backend(args.backend), interpretation(monte_carlo):
        # Construct an SVI object so we can do variational inference on our
        # model/guide pair.
        Elbo = infer.JitTrace_ELBO if args.jit else infer.Trace_ELBO
        elbo = Elbo()
        adam = optim.Adam({"lr": args.learning_rate})
        svi = infer.SVI(model, guide, adam, elbo)

        # Basic training loop
        pyro.get_param_store().clear()
        for step in range(args.num_steps):
            loss = svi.step(data)
            if args.verbose and step % 100 == 0:
                print("step {} loss = {}".format(step, loss))

        # Report the final values of the variational parameters
        # in the guide after training.
        if args.verbose:
            for name in pyro.get_param_store():
                value = pyro.param(name).data
                print("{} = {}".format(name, value.detach().cpu().numpy()))

        # For this simple (conjugate) model we know the exact posterior. In
        # particular we know that the variational distribution should be
        # centered near 3.0. So let's check this explicitly.
        assert (pyro.param("guide_loc") - 3.0).abs() < 0.1 
Author: pyro-ppl, Project: funsor, Lines of code: 50, Source file: minipyro.py


Note: The torch.distributions.constraints.positive examples in this article were compiled by 纯净天空 from open-source code hosted on platforms such as GitHub; the snippets are taken from community open-source projects and their copyright remains with the original authors. Refer to each project's License before redistributing or reusing the code; do not republish this article without permission.