本文整理汇总了Python中torch.distributions.Uniform方法的典型用法代码示例。如果您正苦于以下问题:Python distributions.Uniform方法的具体用法?Python distributions.Uniform怎么用?Python distributions.Uniform使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类torch.distributions
的用法示例。
在下文中一共展示了distributions.Uniform方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_zdist
# 需要导入模块: from torch import distributions [as 别名]
# 或者: from torch.distributions import Uniform [as 别名]
def get_zdist(dist_name, dim, device=None):
    """Build a latent (z) distribution of dimensionality `dim`.

    Args:
        dist_name: either 'uniform' (U(-1, 1) per coordinate) or
            'gauss' (standard normal per coordinate).
        dim: number of latent dimensions.
        device: optional torch device for the distribution parameters.

    Returns:
        A torch.distributions object with an extra `dim` attribute attached.

    Raises:
        NotImplementedError: if `dist_name` is not a supported name.
    """
    # Get distribution
    if dist_name == 'uniform':
        low = -torch.ones(dim, device=device)
        high = torch.ones(dim, device=device)
        zdist = distributions.Uniform(low, high)
    elif dist_name == 'gauss':
        mu = torch.zeros(dim, device=device)
        scale = torch.ones(dim, device=device)
        zdist = distributions.Normal(mu, scale)
    else:
        # Name the unsupported input so the failure is diagnosable upstream.
        raise NotImplementedError(
            "Unknown distribution name %r; expected 'uniform' or 'gauss'."
            % (dist_name,))
    # Add dim attribute
    zdist.dim = dim
    return zdist
示例2: __init__
# 需要导入模块: from torch import distributions [as 别名]
# 或者: from torch.distributions import Uniform [as 别名]
def __init__(self, max_perc, hop_length=None, n_freq=201, fixed_rate=None):
    """Time-stretch transform whose rate is drawn uniformly around 1.

    Args:
        max_perc: maximum fractional deviation; rates are sampled from
            U(1 - max_perc, 1 + max_perc).
        hop_length, n_freq, fixed_rate: forwarded to the base class.
    """
    super(RandomTimeStretch, self).__init__(hop_length, n_freq, fixed_rate)
    # Stretch factors are sampled symmetrically about the identity rate 1.
    self._dist = Uniform(1.0 - max_perc, 1.0 + max_perc)
示例3: logistic_distribution
# 需要导入模块: from torch import distributions [as 别名]
# 或者: from torch.distributions import Uniform [as 别名]
def logistic_distribution(loc, log_scale):
    """Construct a Logistic(loc, scale) distribution from log-scale params.

    Args:
        loc: location tensor of the logistic distribution.
        log_scale: log of the scale; exponentiated with a small epsilon so
            the scale stays strictly positive.

    Returns:
        A torch.distributions.TransformedDistribution behaving as a
        logistic distribution.
    """
    scale = log_scale.exp() + 1e-5
    # Start from U(0, 1), map through the logit, then shift and scale.
    base = distributions.Uniform(torch.zeros_like(loc), torch.ones_like(loc))
    chain = [
        LogisticTransform(),
        distributions.AffineTransform(loc=loc, scale=scale),
    ]
    return distributions.TransformedDistribution(base, chain)
示例4: _sample_batch_from_proposal
# 需要导入模块: from torch import distributions [as 别名]
# 或者: from torch.distributions import Uniform [as 别名]
def _sample_batch_from_proposal(self, batch_size,
                                return_log_density_of_samples=False):
    """Draw `batch_size` samples autoregressively from the proposal.

    Dimensions are filled in one at a time: each loop iteration re-runs the
    autoregressive net on the partially-filled samples and draws the next
    coordinate from the induced per-dimension proposal distribution.

    Args:
        batch_size: number of samples to draw.
        return_log_density_of_samples: if True, additionally return the
            proposal log-density of each sample, summed over dimensions.

    Returns:
        samples of shape [batch_size, input_dim]; if requested, also a
        [batch_size] tensor of summed proposal log-densities.
    """
    # need to do n_samples passes through autoregressive net
    samples = torch.zeros(batch_size, self.autoregressive_net.input_dim)
    # Per-dimension log-densities accumulated here, summed at the end.
    log_density_of_samples = torch.zeros(batch_size,
                                         self.autoregressive_net.input_dim)
    for dim in range(self.autoregressive_net.input_dim):
        # compute autoregressive outputs
        autoregressive_outputs = self.autoregressive_net(samples).reshape(-1,
                                                                          self.dim,
                                                                          self.autoregressive_net.output_dim_multiplier)
        # grab proposal params for dth dimensions
        # NOTE(review): the first self.context_dim entries are skipped —
        # presumably context outputs consumed elsewhere; confirm.
        proposal_params = autoregressive_outputs[..., dim, self.context_dim:]
        # make mixture coefficients, locs, and scales for proposal
        logits = proposal_params[...,
                                 :self.n_proposal_mixture_components]  # [B, D, M]
        if logits.shape[0] == 1:
            # Drop the singleton batch axis so the mixture is built per-dim.
            logits = logits.reshape(self.dim, self.n_proposal_mixture_components)
        locs = proposal_params[...,
                               self.n_proposal_mixture_components:(
                                   2 * self.n_proposal_mixture_components)]  # [B, D, M]
        # The min-scale offset keeps component scales strictly positive.
        scales = self.mixture_component_min_scale + self.scale_activation(
            proposal_params[...,
                            (2 * self.n_proposal_mixture_components):])  # [B, D, M]
        # create proposal
        if self.Component is not None:
            mixture_distribution = distributions.OneHotCategorical(
                logits=logits,
                validate_args=True
            )
            components_distribution = self.Component(loc=locs, scale=scales)
            # distributions_ is a project-local module providing
            # MixtureSameFamily (not torch.distributions).
            self.proposal = distributions_.MixtureSameFamily(
                mixture_distribution=mixture_distribution,
                components_distribution=components_distribution
            )
            proposal_samples = self.proposal.sample((1,))  # [S, B, D]
        else:
            # No component family configured: fall back to a fixed U(-4, 4).
            self.proposal = distributions.Uniform(low=-4, high=4)
            proposal_samples = self.proposal.sample(
                (1, batch_size, 1)
            )
        proposal_samples = proposal_samples.permute(1, 2, 0)  # [B, D, S]
        proposal_log_density = self.proposal.log_prob(proposal_samples)
        # Tensors start at zero, so += effectively assigns the dth column;
        # detach blocks gradients from flowing through the sampling step.
        log_density_of_samples[:, dim] += proposal_log_density.reshape(-1).detach()
        samples[:, dim] += proposal_samples.reshape(-1).detach()
    if return_log_density_of_samples:
        return samples, torch.sum(log_density_of_samples, dim=-1)
    else:
        return samples