This article collects typical usage examples of the chainer.functions.where method in Python. If you are wondering what functions.where does, how to call it, or what it looks like in real code, the curated examples below may help. You can also browse further usage examples from its parent module, chainer.functions.
The following shows 15 code examples of functions.where, sorted by popularity by default.
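Before turning to the collected examples, here is a minimal standalone sketch (written for this overview, not taken from any of the projects below) of the basic call. F.where(condition, x, y) picks elements from x where condition is True and from y everywhere else; it accepts plain NumPy/CuPy arrays as well as Variables.

import numpy as np
import chainer.functions as F

condition = np.array([[True, False], [False, True]])
x = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
y = np.zeros((2, 2), dtype=np.float32)

z = F.where(condition, x, y)
print(z.array)  # [[1. 0.] [0. 4.]]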
Example 1: __call__
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def __call__(self, x, mask):
    self.m.W.data = self.xp.array(self.maskW)  # mask-convolution weights are all set to 1
    h = self.c(x * mask)  # (B, C, H, W)
    B, C, H, W = h.shape
    b = F.transpose(F.broadcast_to(self.c.b, (B, H, W, C)), (0, 3, 1, 2))
    h = h - b
    mask_sums = self.m(mask)  # number of valid (unmasked) pixels under each window
    mask_new = (self.xp.sign(mask_sums.data - 0.5) + 1.0) * 0.5  # 1 where any pixel is valid, else 0
    mask_new_b = mask_new.astype("bool")
    # avoid division by zero: fully masked windows get a small dummy denominator
    mask_sums = F.where(mask_new_b, mask_sums,
                        0.01 * Variable(self.xp.ones(mask_sums.shape).astype("f")))
    h = h / mask_sums + b
    mask_new = Variable(mask_new)
    # zero out the outputs of fully masked windows
    h = F.where(mask_new_b, h, Variable(self.xp.zeros(h.shape).astype("f")))
    if self.bn:
        h = self.batchnorm(h)
    if self.noise:
        h = add_noise(h)
    if self.dropout:
        h = F.dropout(h)
    if self.activation is not None:
        h = self.activation(h)
    return h, mask_new
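The two F.where calls above implement a common partial-convolution trick: windows containing no valid pixels would divide by zero, so a small dummy denominator is substituted first and the corresponding outputs are zeroed afterwards. A toy illustration of just that trick (my own sketch, not code from the repository):

import numpy as np
import chainer
import chainer.functions as F

mask_sums = np.array([4.0, 0.0, 2.0], dtype=np.float32)  # valid pixels under each window
h = chainer.Variable(np.array([8.0, 5.0, 6.0], dtype=np.float32))  # raw conv outputs

valid = mask_sums > 0
safe_sums = F.where(valid, mask_sums, 0.01 * np.ones_like(mask_sums))
h = h / safe_sums                                 # no division by zero anywhere
h = F.where(valid, h, np.zeros_like(h.array))     # invalid windows are set to 0
print(h.array)  # [2. 0. 3.]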
Example 2: attend
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def attend(self, query, key, value, mask, minfs=None):
    """
    Input shapes:
        q=(b, units, dec_l), k=(b, units, enc_l),
        v=(b, units, dec_l, enc_l), m=(b, dec_l, enc_l)
    """
    # Calculate attention scores, with a mask for zero-padded areas
    pre_a = F.batch_matmul(query, key, transa=True)  # (b, dec_l, enc_l)
    if minfs is None:
        minfs = self.xp.full(pre_a.shape, -np.inf, pre_a.dtype)
    pre_a = F.where(mask, pre_a, minfs)
    a = F.softmax(pre_a, axis=2)
    # If all values along axis=2 are -inf, softmax yields NaN, so re-mask those entries.
    a = F.where(self.xp.isnan(a.data),
                self.xp.zeros(a.shape, dtype=a.dtype), a)
    reshaped_a = a[:, None]  # (b, 1, dec_xl, enc_l)
    # Calculate the weighted sum
    pre_c = F.broadcast_to(reshaped_a, value.shape) * value
    c = F.sum(pre_c, axis=3, keepdims=True)  # (b, units, dec_xl, 1)
    return c
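The same two-step masking pattern appears in many attention implementations: padded positions are pushed to -inf before the softmax, and rows that consist entirely of padding (whose softmax becomes NaN) are reset to zero. A standalone sketch with toy shapes (the shapes and names are illustrative, not the repository's):

import numpy as np
import chainer.functions as F

scores = np.random.randn(2, 3, 4).astype(np.float32)  # (batch, dec_len, enc_len)
mask = np.ones((2, 3, 4), dtype=bool)
mask[:, :, 2:] = False   # last two encoder positions are padding
mask[0, 1, :] = False    # one decoder step sees only padding

neg_inf = np.full(scores.shape, -np.inf, dtype=scores.dtype)
a = F.softmax(F.where(mask, scores, neg_inf), axis=2)
a = F.where(np.isnan(a.array), np.zeros_like(a.array), a)
print(a.array.sum(axis=2))  # ~1.0 per row, 0.0 for the fully padded row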
Example 3: __call__
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def __call__(self, x, rois, roi_indices):
# global context module
h = self.global_context_module(x)
# psroi max align
pool = ps_roi_max_align_2d(
h, rois, roi_indices,
(10, self.roi_size, self.roi_size),
self.spatial_scale, self.roi_size,
sampling_ratio=2)
pool = F.where(
self.xp.isinf(pool.array),
self.xp.zeros(pool.shape, dtype=pool.dtype), pool)
# fc
fc1 = F.relu(self.fc1(pool))
roi_cls_locs = self.cls_loc(fc1)
roi_scores = self.score(fc1)
return roi_cls_locs, roi_scores
Example 4: calc_loss
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def calc_loss(self, grids, image_size, **kwargs):
    """
    Calculate a loss based on the expected grid size. Penalize every predicted grid
    whose width or height is smaller than that of the crop region.
    """
    top_left_x, top_right_x, _, _, top_left_y, _, bottom_left_y, _ = self.get_corners(grids, image_size)
    grid_widths = top_right_x - top_left_x
    grid_heights = bottom_left_y - top_left_y
    expected_width = self.xp.full_like(grid_widths.array, grids.shape[-1], dtype=grid_widths.dtype)
    expected_height = self.xp.full_like(grid_heights.array, grids.shape[2], dtype=grid_heights.dtype)
    width_loss = F.maximum(self.xp.zeros_like(grid_widths.array), expected_width - grid_widths)
    height_loss = F.maximum(self.xp.zeros_like(grid_heights.array), expected_height - grid_heights)
    return sum(width_loss) + sum(height_loss)
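The loss is a simple hinge: F.maximum clamps negative differences to zero, so only grids that come out smaller than the expected size contribute. A tiny numeric illustration (my own sketch, not part of the project):

import numpy as np
import chainer.functions as F

expected_width = np.array([100.0, 100.0], dtype=np.float32)
grid_widths = np.array([80.0, 120.0], dtype=np.float32)  # one grid too narrow, one wide enough
width_loss = F.maximum(np.zeros_like(grid_widths), expected_width - grid_widths)
print(width_loss.array)  # [20.  0.]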
Example 5: shifted_softplus
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def shifted_softplus(x, beta=1, shift=0.5, threshold=20):
    """Shifted softplus function, which satisfies f(0) = 0.

    Args:
        x (Variable): Input variable
        beta (float): Parameter :math:`\\beta`.
        shift (float): Shift parameter
        threshold (float): Threshold to avoid overflow

    Returns:
        output (Variable): Output variable whose shape is the same as `x`
    """
    xp = chainer.cuda.get_array_module(x)
    cond = chainer.as_variable(x).array > threshold
    # above the threshold softplus(x) ~ x, so pass x through to avoid overflow
    x = functions.where(cond, x,
                        functions.softplus(x, beta=beta))
    x += xp.log(shift)
    return x
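A quick sanity check (not from the source) of the definition above: f(0) should be numerically ~0 because softplus(0) = log 2 cancels against log(0.5), and inputs beyond the threshold pass through with only the log(0.5) offset added.

import numpy as np

x = np.array([0.0, 1.0, 30.0], dtype=np.float32)
y = shifted_softplus(x)   # uses the function defined in Example 5
print(y.array)            # approx. [0.0, 0.62, 29.31]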
Example 6: evaluation
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def evaluation(model, test_image_folder, image_size=256):
@chainer.training.make_extension()
def _eval(trainer, it):
xp = model.xp
batch = it.next()
batchsize = len(batch)
x = xp.zeros((batchsize, 3, image_size, image_size)).astype("f")
m = xp.zeros((batchsize, 3, image_size, image_size)).astype("f")
for i in range(batchsize):
x[i, :] = xp.asarray(batch[i][0])
m[i, :] = xp.asarray(batch[i][1])
mask_b = xp.array(m.astype("bool"))
I_gt = Variable(x)
M = Variable(m)
M_b = Variable(mask_b)
I_out = model(x, m)
        I_comp = F.where(M_b, I_gt, I_out)
img = I_comp.data.get()
img = batch_postprocess_images(img, int(batchsize/2), 2)
Image.fromarray(img).save(test_image_folder+"/iter_"+str(trainer.updater.iteration)+"_Icomp.jpg")
img = I_out.data.get()
img = batch_postprocess_images(img, int(batchsize/2), 2)
Image.fromarray(img).save(test_image_folder+"/iter_"+str(trainer.updater.iteration)+"_Iout.jpg")
img = M.data.get()
img = batch_postprocess_images(img, int(batchsize/2), 2)
Image.fromarray(img).save(test_image_folder+"/iter_"+str(trainer.updater.iteration)+"_mask.jpg")
def evaluation(trainer):
it = trainer.updater.get_iterator('test')
_eval(trainer, it)
return evaluation
Example 7: __call__
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def __call__(self, x, mask):
    #h = self.c(x) - self.b
    self.m.W.data = self.xp.array(self.maskW)  # mask-convolution weights are all set to 1
    h = self.c(x * mask)  # (B, C, H, W)
    B, C, H, W = h.shape
    #b = F.transpose(F.broadcast_to(self.c.b, (B, H, W, C)), (0, 3, 1, 2))
    #h = h - b
    mask_sums = self.m(mask)
    mask_new = (self.xp.sign(mask_sums.data - 0.5) + 1.0) * 0.5
    mask_new_b = mask_new.astype("bool")
    # avoid division by zero in fully masked windows
    mask_sums = F.where(mask_new_b, mask_sums,
                        0.01 * Variable(self.xp.ones(mask_sums.shape).astype("f")))
    h = h / mask_sums
    #h = h/mask_sums + b
    mask_new = Variable(mask_new)
    h = F.where(mask_new_b, h, Variable(self.xp.zeros(h.shape).astype("f")))
    #elif self.sample=="up":
    #    h = F.unpooling_2d(x, 2, 2, 0, cover_all=False)
    #    h = self.c(h)
    #else:
    #    print("unknown sample method %s" % self.sample)
    if self.bn:
        h = self.batchnorm(h)
    if self.noise:
        h = add_noise(h)
    if self.dropout:
        h = F.dropout(h)
    if self.activation is not None:
        h = self.activation(h)
    return h, mask_new
Author: SeitaroShinagawa | Project: chainer-partial_convolution_image_inpainting | Source file: net_pre-trained.py
Example 8: forward_expected
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def forward_expected(self, inputs):
c, x, y = inputs
z_expected = numpy.where(c, x, y)
return z_expected,
Example 9: forward
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def forward(self, inputs, devices):
c, x, y = inputs
z = functions.where(c, x, y)
return z,
Example 10: check_forward_raises
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def check_forward_raises(self, c_data, x_data, y_data):
c = chainer.Variable(c_data)
x = chainer.Variable(x_data)
y = chainer.Variable(y_data)
with pytest.raises(type_check.InvalidType):
functions.where(c, x, y)
Example 11: check_backward
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def check_backward(self, x_data, roi_data, roi_index_data, y_grad):
def f(x, rois, roi_indices):
y = functions.roi_max_pooling_2d(
x, rois, roi_indices, outsize=self.outsize,
spatial_scale=self.spatial_scale)
xp = cuda.get_array_module(y)
# replace -inf with zero for gradient_check
y = functions.where(
xp.isinf(y.array), xp.zeros(y.shape, dtype=y.dtype), y)
return y
gradient_check.check_backward(
f, (x_data, roi_data, roi_index_data), y_grad,
no_grads=[False, True, True], **self.check_backward_options)
Example 12: check_backward
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def check_backward(self, x_data, roi_data, roi_index_data, y_grad):
def f(x, rois, roi_indices):
y = functions.roi_max_align_2d(
x, rois, roi_indices, outsize=self.outsize,
spatial_scale=self.spatial_scale,
sampling_ratio=self.sampling_ratio)
xp = chainer.backend.get_array_module(y)
y = functions.where(
xp.isinf(y.array), xp.zeros(y.shape, dtype=y.dtype), y)
return y
gradient_check.check_backward(
f, (x_data, roi_data, roi_index_data), y_grad,
no_grads=[False, True, True], **self.check_backward_options)
Example 13: __call__
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def __call__(self, x, mask=None):
x = F.dropout(x, ratio=self.dropout)
out, pregate = F.split_axis(self.conv(x), 2, axis=1)
out = out * F.sigmoid(pregate)
if mask is not None:
out *= mask
return out
# TODO: For layers whose output is not directly fed to a gated linear
# unit, we initialize weights from N(0, sqrt(1/n_l)), where n_l is the
# number of input connections for each neuron.
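A hedged sketch of what that TODO describes in Chainer terms (the layer and its sizes are hypothetical, not taken from the project): draw the initial weights from a normal distribution with standard deviation sqrt(1/n_l), where n_l is the number of input connections.

import numpy as np
import chainer
import chainer.links as L

n_l = 512  # hypothetical number of input connections
init = chainer.initializers.Normal(scale=np.sqrt(1.0 / n_l))  # std dev = sqrt(1/n_l)
layer = L.Linear(n_l, 256, initialW=init)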
Example 14: attention_implementation
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def attention_implementation(self, query, key, value, mask=None, dropout_ratio=None):
scores = F.matmul(query, F.transpose(key, (0, 1, 3, 2))) / math.sqrt(self.key_dimensionality)
if mask is not None:
batch_size, num_heads, _, _ = scores.shape
mask = self.xp.array(mask)
mask = self.xp.broadcast_to(mask, (batch_size, num_heads) + mask.shape[2:])
mask = mask[:, :, :scores.shape[2], :scores.shape[3]]
scores = F.where(mask, scores, self.xp.full_like(scores.array, -1e9))
attention_probabilities = F.softmax(scores, axis=3)
if dropout_ratio is not None:
attention_probabilities = F.dropout(attention_probabilities, ratio=dropout_ratio)
return F.matmul(attention_probabilities, value), attention_probabilities
Example 15: test_output
# Required imports: from chainer import functions [as alias]
# Or: from chainer.functions import where [as alias]
def test_output(self):
model = chainer.Sequential(
F.where
)
        cond = np.array([[1, 0, 0], [0, 1, 0]], dtype=bool)
x = input_generator.increasing(2, 3)
y = np.zeros((2, 3), np.float32)
self.expect(model, (cond, x, y), skip_opset_version=[7, 8])