This article collects typical usage examples of chainer.cuda.ndarray in Python. If you are unsure what cuda.ndarray is, how it is used, or what it looks like in practice, the curated code examples below may help. You can also read further about the module that provides it, chainer.cuda.
The following shows 11 code examples of cuda.ndarray, sorted by popularity by default.
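Before the individual examples, here is a minimal sketch of the pattern most of them share: testing whether an array lives on the GPU with isinstance(..., cuda.ndarray) and bringing it back to the host. The helper name to_host is illustrative and not part of any example below.

import numpy as np
from chainer import cuda

def to_host(arr):
    # cuda.ndarray is Chainer's alias for cupy.ndarray (a placeholder class is
    # used when CuPy is unavailable); cuda.to_cpu always returns a numpy.ndarray.
    if isinstance(arr, cuda.ndarray):
        return cuda.to_cpu(arr)
    return np.asarray(arr)

print(to_host(np.arange(3, dtype=np.float32)))  # already on the host: [0. 1. 2.]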
Example 1: visualize_layer_activations
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def visualize_layer_activations(model, im, layer_idx):
"""Compute the activations for each feature map for the given layer for
this particular image. Note that the input x should be a mini-batch
of size one, i.e. a single image.
"""
if model._device_id is not None and model._device_id >= 0: # Using GPU
im = cuda.cupy.array(im)
activations = model.activations(Variable(im), layer_idx)
if isinstance(activations, cuda.ndarray):
activations = cuda.cupy.asnumpy(activations)
# Rescale to [0, 255]
activations -= activations.min()
activations /= activations.max()
activations *= 255
return activations.astype(np.uint8)
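A hedged usage sketch for the helper above. The stub below only mimics the two attributes the helper touches (_device_id and activations); a real model would be a chainer.Chain exposing the same interface.

import numpy as np

class _StubModel:
    # Hypothetical stand-in: just enough interface for visualize_layer_activations.
    _device_id = None  # no GPU, so the cupy transfer branch is skipped

    def activations(self, x, layer_idx):
        return x.data  # pretend the raw input is the requested feature map

im = np.random.rand(1, 3, 8, 8).astype(np.float32)  # mini-batch of one image
out = visualize_layer_activations(_StubModel(), im, layer_idx=0)
print(out.shape, out.dtype)  # (1, 3, 8, 8) uint8, rescaled to [0, 255]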
Example 2: __call__
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def __call__(self, key, value):
key = self.path + key.lstrip('/')
if not self.strict and key not in self.npz:
return value
dataset = None
for npz in self.npz_list:
try:
this_d = npz[key]
except KeyError:
this_d = npz["updater/model:main/"+key]
if dataset is None:
dataset = this_d
else:
dataset = dataset + this_d
dataset /= len(self.npz_list)
if value is None:
return dataset
elif isinstance(value, np.ndarray):
np.copyto(value, dataset)
elif isinstance(value, cuda.ndarray):
value.set(np.asarray(dataset))
else:
value = type(value)(np.asarray(dataset))
return value
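This __call__ follows Chainer's (de)serializer callback signature (key, value) and writes the element-wise average of the same entry, taken from several .npz snapshots, back into value. A self-contained sketch of just that averaging step (the snapshot dictionaries and key below are illustrative):

import numpy as np

# Two fake snapshots holding the same parameter key (illustrative values).
snapshots = [{'fc/W': np.full((2, 2), 1.0, dtype=np.float32)},
             {'fc/W': np.full((2, 2), 3.0, dtype=np.float32)}]
averaged = sum(s['fc/W'] for s in snapshots) / len(snapshots)
print(averaged)  # [[2. 2.] [2. 2.]] -- what the method above copies into `value`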
Example 3: lazy_init_conv_to_join
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def lazy_init_conv_to_join(block, x):
if not hasattr(block, 'Conv2d_1x1'):
with block.init_scope():
block.Conv2d_1x1 = L.Convolution2D(x.shape[1], 1, initialW=I.HeNormal())
if isinstance(x.data, cuda.ndarray):
block.Conv2d_1x1.to_gpu(x.data.device)
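A hedged sketch of how such a lazy initializer might be wired into a block; the JoinBlock chain below is an assumption and not part of the example, only lazy_init_conv_to_join is.

import chainer

class JoinBlock(chainer.Chain):
    # Hypothetical container: the 1x1 convolution is created on the first
    # forward pass, once the shape of x is known.
    def __call__(self, x):
        lazy_init_conv_to_join(self, x)  # attaches self.Conv2d_1x1 if missing
        return self.Conv2d_1x1(x)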
Example 4: _check_class_weight_option
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def _check_class_weight_option(class_weight):
if class_weight is not None:
if class_weight.ndim != 1:
raise ValueError('class_weight.ndim should be 1')
if class_weight.dtype.kind != 'f':
raise ValueError('The dtype of class_weight should be \'f\'')
if isinstance(class_weight, variable.Variable):
raise ValueError('class_weight should be a numpy.ndarray or '
'cupy.ndarray, not a chainer.Variable')
Example 5: _check_input_values
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def _check_input_values(x, t, ignore_label):
# Extract the raw ndarray as Variable.__ge__ is not implemented.
# We assume that t is already an ndarray.
if isinstance(x, variable.Variable):
x = x.data
if not (((0 <= t) &
(t < x.shape[1])) |
(t == ignore_label)).all():
msg = ('Each label `t` need to satisfy '
'`0 <= t < x.shape[1] or t == %d`' % ignore_label)
raise ValueError(msg)
Example 6: forward
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def forward(self, inputs):
if any(isinstance(x, cuda.ndarray) for x in inputs):
return self.forward_gpu(inputs)
else:
return self.forward_cpu(inputs)
Example 7: backward
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def backward(self, inputs, grad_outputs):
if any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs):
return self.backward_gpu(inputs, grad_outputs)
else:
return self.backward_cpu(inputs, grad_outputs)
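These two methods are the standard chainer.Function dispatch: a subclass only needs to provide the *_cpu and *_gpu variants, and the presence of a cuda.ndarray input decides which pair is called. A minimal hedged sketch (the Scale function below is illustrative, not from the source):

import numpy as np
import chainer

class Scale(chainer.Function):
    # Doubles its input; forward()/backward() above route numpy inputs to the
    # *_cpu methods and cupy inputs to the *_gpu methods.
    def forward_cpu(self, inputs):
        x, = inputs
        return 2 * x,

    forward_gpu = forward_cpu  # the same arithmetic works on cupy arrays

    def backward_cpu(self, inputs, grad_outputs):
        gy, = grad_outputs
        return 2 * gy,

    backward_gpu = backward_cpu

y = Scale()(np.array([1.0, 2.0], dtype=np.float32))
print(y.data)  # [2. 4.]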
Example 8: _concat_arrays
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def _concat_arrays(arrays, padding):
    if not isinstance(arrays[0], (numpy.ndarray, cuda.ndarray)):
arrays = numpy.asarray(arrays)
xp = cuda.get_array_module(arrays[0])
with cuda.get_device_from_array(arrays[0]):
return xp.concatenate(arrays)
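A quick usage sketch. Note that in the snippet above the padding argument is accepted but never used, so arrays that concatenate cleanly are assumed here:

import numpy as np

batch = [np.array([1, 2], dtype=np.int32), np.array([3, 4], dtype=np.int32)]
print(_concat_arrays(batch, padding=None))  # [1 2 3 4], concatenated on the host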
Example 9: default
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def default(self, obj):
if isinstance(obj, numpy.integer):
return int(obj)
elif isinstance(obj, numpy.floating):
return float(obj)
elif isinstance(obj, numpy.ndarray):
return obj.tolist()
elif isinstance(obj, cuda.ndarray):
return cuda.to_cpu(obj).tolist()
elif _is_pathlib_available and isinstance(obj, PurePath):
# save as str representation
# convert windows path separator to linux format
return str(obj).replace('\\', '/')
else:
return super(JSONEncoderEX, self).default(obj)
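A hedged usage sketch for the encoder above, assuming JSONEncoderEX derives from json.JSONEncoder (the standard pattern implied by the super() call) and that json and numpy are imported in its module:

import json
import numpy

record = {'epoch': numpy.int64(3),
          'loss': numpy.float32(0.25),
          'weights': numpy.arange(3, dtype=numpy.float32)}
print(json.dumps(record, cls=JSONEncoderEX))
# {"epoch": 3, "loss": 0.25, "weights": [0.0, 1.0, 2.0]}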
Example 10: check_add_deconv_layers
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def check_add_deconv_layers(self, nobias=True):
"""Add a deconvolutional layer for each convolutional layer already
defined in the network."""
if len(self.deconv_blocks) == len(self.conv_blocks):
return
for conv_block in self.conv_blocks:
deconv_block = []
for conv in conv_block:
out_channels, in_channels, kh, kw = conv.W.data.shape
if isinstance(conv.W.data, cuda.ndarray):
initialW = cuda.cupy.asnumpy(conv.W.data)
else:
initialW = conv.W.data
deconv = L.Deconvolution2D(out_channels, in_channels,
(kh, kw), stride=conv.stride,
pad=conv.pad,
initialW=initialW,
nobias=nobias)
if isinstance(conv.W.data, cuda.ndarray):
deconv.to_gpu()
self.add_link('de{}'.format(conv.name), deconv)
deconv_block.append(deconv)
self.deconv_blocks.append(deconv_block)
Example 11: adaptive_softmax_cross_entropy
# Required import: from chainer import cuda [as alias]
# Or: from chainer.cuda import ndarray [as alias]
def adaptive_softmax_cross_entropy(
x, t, Ws, Rs, cutoff, normalize=True,
ignore_label=-1, reduce='mean', enable_double_backprop=False):
"""Computes cross entropy loss for pre-softmax activations.
Args:
x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`):
Variable holding a multidimensional array whose element indicates
hidden states: the first axis of the variable
represents the number of samples, and the second axis represents
the number of hidden units.
Ws (list of :class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`):
Variables of weight matrices for word outputs.
The first matrix is for the head.
            The remaining matrices are for the tails, in order.
Rs (list of :class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`):
Variables of weight matrices for reducing hidden units.
The matrices are for the tails in order.
The number of matrices must be ``len(Ws) - 1``.
t (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`):
Variable holding an :class:`numpy.int32` vector of ground truth
labels. If ``t[i] == ignore_label``, corresponding ``x[i]`` is
ignored.
cutoff (list of int):
Cutoff indices of clusters. e.g. [0, 2000, 10000, n_vocab]
normalize (bool): If ``True``, this function normalizes the cross
entropy loss across all instances. If ``False``, it only
normalizes along a batch size.
ignore_label (int): Label value you want to ignore. Its default value
is ``-1``. See description of the argument `t`.
reduce (str): A string that determines whether to reduce the loss
values. If it is ``'mean'``, it computes the sum of the individual
cross entropy and normalize it according to ``normalize`` option.
If it is ``'no'``, this function computes cross entropy for each
instance and does not normalize it (``normalize`` option is
ignored). In this case, the loss value of the ignored instance,
which has ``ignore_label`` as its target value, is set to ``0``.
Returns:
~chainer.Variable: A variable holding a scalar array of the cross
entropy loss. If ``reduce`` is ``'mean'``, it is a scalar array.
If ``reduce`` is ``'no'``, the shape is same as that of ``x``.
"""
if enable_double_backprop:
raise NotImplementedError()
else:
return AdaptiveSoftmaxCrossEntropy(
cutoff, normalize=normalize,
ignore_label=ignore_label,
reduce=reduce)(
x, t, *Ws, *Rs)
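Finally, a hedged sketch of how the arguments fit together, based only on the docstring: the head weight comes first in Ws, the tail weights follow, and Rs holds one reduction matrix per tail. All shapes below are assumptions, since the AdaptiveSoftmaxCrossEntropy implementation that fixes the exact head/tail layout is not shown here.

import numpy as np

n_units, n_vocab, batch = 64, 12000, 8
cutoff = [0, 2000, 10000, n_vocab]  # head cluster plus two tail clusters

x = np.random.randn(batch, n_units).astype(np.float32)          # hidden states
t = np.random.randint(0, n_vocab, size=batch).astype(np.int32)  # word ids

# Head weight first, then one weight per tail cluster; the extra head rows and
# the reduced tail width are assumptions about the implementation.
Ws = [np.random.randn(cutoff[1] + 2, n_units).astype(np.float32),
      np.random.randn(cutoff[2] - cutoff[1], n_units // 4).astype(np.float32),
      np.random.randn(n_vocab - cutoff[2], n_units // 4).astype(np.float32)]
# One reduction matrix per tail: len(Rs) == len(Ws) - 1.
Rs = [np.random.randn(n_units // 4, n_units).astype(np.float32),
      np.random.randn(n_units // 4, n_units).astype(np.float32)]

loss = adaptive_softmax_cross_entropy(x, t, Ws, Rs, cutoff)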