本文整理汇总了Python中torch.autograd.Variable.get方法的典型用法代码示例。如果您正苦于以下问题:Python Variable.get方法的具体用法?Python Variable.get怎么用?Python Variable.get使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类torch.autograd.Variable
的用法示例。
在下文中一共展示了Variable.get方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_remote_backprop
# Required import: from torch.autograd import Variable  [aliased as Var]
# Method exercised: torch.autograd.Variable.get
def test_remote_backprop(self):
    """Multiply two Variables held on a remote worker, backprop, and verify
    gradients both on the remote side and after retrieving them with .get().
    """
    hook = TorchHook(verbose=False)
    me = hook.local_worker
    me.verbose = False
    worker = VirtualWorker(id=1, hook=hook, verbose=False)
    me.add_worker(worker)

    # Two 2x2 Variables, sent to the remote worker in place.
    a = Var(torch.ones(2, 2), requires_grad=True).send_(worker)
    b = Var(torch.ones(2, 2) * 2, requires_grad=True).send_(worker)

    prod = a * b
    prod.sum().backward()

    # d(sum(a*b))/db = a (all ones); d(sum(a*b))/da = b (all twos).
    # Check the gradients stored in the worker's object registry first.
    assert (worker._objects[b.id].grad.data == torch.ones(2, 2)).all()
    assert (worker._objects[a.id].grad.data == torch.ones(2, 2) * 2).all()

    # Pull everything back locally via .get() and re-check values and grads.
    assert (prod.get().data == torch.ones(2, 2) * 2).all()
    assert (a.get().data == torch.ones(2, 2)).all()
    assert (b.get().data == torch.ones(2, 2) * 2).all()
    assert (a.grad.data == torch.ones(2, 2) * 2).all()
    assert (b.grad.data == torch.ones(2, 2)).all()
示例2: test_torch_F_relu_on_remote_var
# Required import: from torch.autograd import Variable  [aliased as Var]
# Method exercised: torch.autograd.Variable.get
def test_torch_F_relu_on_remote_var(self):
    """Apply torch.nn.functional.relu to a Variable on a remote worker,
    fetch the result with .get(), and check it locally.

    Fix: pass verbose=False to VirtualWorker, consistent with
    test_remote_backprop, so the worker does not emit log output during
    the test run.
    """
    hook = TorchHook(verbose=False)
    me = hook.local_worker
    remote = VirtualWorker(id=2, hook=hook, verbose=False)
    me.add_worker(remote)

    x = Var(torch.FloatTensor([[1, -1], [-1, 1]]))
    x.send(remote)   # move the Variable to the remote worker
    x = F.relu(x)    # executed remotely through the hook
    x.get()          # retrieve the result back to the local worker
    assert torch.equal(x, Var(torch.FloatTensor([[1, 0], [0, 1]])))