本文整理汇总了Python中torch.default_generator属性的典型用法代码示例。如果您正苦于以下问题:Python torch.default_generator属性的具体用法?Python torch.default_generator怎么用?Python torch.default_generator使用的例子?那么恭喜您, 这里精选的代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在模块 torch 的用法示例。
在下文中一共展示了torch.default_generator方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: resample
# 需要导入模块: import torch [as 别名]
# 或者: from torch import default_generator [as 别名]
def resample(self, seed=None):
    """Resample the dataset by drawing a fresh set of indices.

    Args:
        seed (int, optional): Seed for resampling. By default no seed is
            used (draws come from torch's global default generator).

    Side effects:
        Sets ``self.perm`` to a LongTensor of ``len(self)`` indices into
        ``self.dataset``.
    """
    if seed is not None:
        # Use a dedicated generator so that seeding this sampler does not
        # mutate torch's global RNG state (torch.manual_seed(seed) would
        # reseed the process-wide default generator as a side effect).
        gen = torch.Generator()
        gen.manual_seed(seed)
    else:
        gen = torch.default_generator
    if self.replacement:
        # With replacement: len(self) indices drawn uniformly
        # from [0, len(self.dataset)).
        self.perm = torch.LongTensor(len(self)).random_(
            len(self.dataset), generator=gen)
    else:
        # Without replacement: full random permutation, truncated to the
        # first len(self) entries.
        self.perm = torch.randperm(
            len(self.dataset), generator=gen).narrow(0, 0, len(self))
示例2: forward
# 需要导入模块: import torch [as 别名]
# 或者: from torch import default_generator [as 别名]
def forward(self, input):
    """Apply RReLU (randomized leaky ReLU) to ``input`` via the legacy
    THNN backend.

    The sampled per-element negative slopes are stored in ``self.noise``
    so the backward pass can reuse them; slopes are presumably drawn from
    [self.lower, self.upper] by the backend kernel (RReLU semantics —
    not visible here).
    """
    # Resolve the C backend matching this input's tensor type.
    self._backend = type2backend[type(input)]
    # Allocate output and noise buffers of the same tensor type as input.
    output = input.new()
    self.noise = input.new()
    self._backend.RReLU_updateOutput(
        self._backend.library_state,
        input,
        output,
        self.noise,
        self.lower,
        self.upper,
        self.train,
        self.inplace,
        # CPU path uses torch's default RNG; for CUDA tensors 0 is passed
        # instead (the CUDA kernel manages its own generator).
        torch.default_generator if not input.is_cuda else 0
    )
    self.save_for_backward(input)
    return output
示例3: forward
# 需要导入模块: import torch [as 别名]
# 或者: from torch import default_generator [as 别名]
def forward(ctx, input, lower, upper, train, inplace):
    """Apply RReLU (randomized leaky ReLU) to ``input`` via the legacy
    THNN backend (new-style autograd Function with ``ctx``).

    Stashes lower/upper/train/inplace and the sampled slope buffer
    ``ctx.noise`` on the context for the backward pass.
    """
    ctx.lower = lower
    ctx.upper = upper
    ctx.train = train
    ctx.inplace = inplace
    # Resolve the C backend matching this input's tensor type.
    ctx._backend = type2backend[type(input)]
    if ctx.inplace:
        # In-place: write the result into `input` and tell autograd the
        # tensor was modified.
        ctx.mark_dirty(input)
        output = input
    else:
        output = input.new(input.size())
    # Buffer for the per-element negative slopes sampled by the kernel.
    ctx.noise = input.new()
    ctx._backend.RReLU_updateOutput(
        ctx._backend.library_state,
        input,
        output,
        ctx.noise,
        ctx.lower,
        ctx.upper,
        ctx.train,
        ctx.inplace,
        # CPU path uses torch's default RNG; for CUDA tensors 0 is passed
        # instead (the CUDA kernel manages its own generator).
        torch.default_generator if not input.is_cuda else 0
    )
    ctx.save_for_backward(input)
    return output
示例4: updateOutput
# 需要导入模块: import torch [as 别名]
# 或者: from torch import default_generator [as 别名]
def updateOutput(self, input):
    """Compute the RReLU forward pass into ``self.output`` using the
    legacy THNN backend and return it.

    Assumes ``self._backend``, ``self.output`` and ``self.noise`` were
    set up elsewhere (legacy nn-module convention — not visible here).
    """
    self._backend.RReLU_updateOutput(
        self._backend.library_state,
        input,
        self.output,
        self.noise,
        self.lower,
        self.upper,
        self.train,
        self.inplace,
        # CPU path uses torch's default RNG; for CUDA tensors 0 is passed
        # instead (the CUDA kernel manages its own generator).
        torch.default_generator if not input.is_cuda else 0
    )
    return self.output