本文整理汇总了Python中pylearn2.linear.matrixmul.MatrixMul.lmul_T方法的典型用法代码示例。如果您正苦于以下问题:Python MatrixMul.lmul_T方法的具体用法?Python MatrixMul.lmul_T怎么用?Python MatrixMul.lmul_T使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pylearn2.linear.matrixmul.MatrixMul
的用法示例。
在下文中一共展示了MatrixMul.lmul_T方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: IsingHidden
# 需要导入模块: from pylearn2.linear.matrixmul import MatrixMul [as 别名]
# 或者: from pylearn2.linear.matrixmul.MatrixMul import lmul_T [as 别名]
#.........这里部分代码省略.........
('mean_x.min_u', v_mean.min())
]:
rval[prefix+key] = val
return rval
def sample(self, state_below=None, state_above=None,
           layer_above=None, theano_rng=None):
    """Sample this Ising layer's state given its neighboring layers.

    Computes the total input z = W x + b, adds any top-down message
    from `layer_above`, maps it to an "on" probability via
    sigmoid(2 * z) (units take values in {-1., +1.}), and draws a
    binomial sample rescaled into that range.

    Parameters
    ----------
    state_below : tensor_like
        State of the layer below, in that layer's space.
    state_above : tensor_like, optional
        State of the layer above; used to form a downward message.
    layer_above : Layer, optional
        The layer above; must be given when `state_above` is.
    theano_rng : MRG_RandomStreams
        Required; only defaults to None for argument-order reasons.

    Returns
    -------
    samples : tensor_like
        Samples in {-1., +1.} with the layer's shape.

    Raises
    ------
    ValueError
        If `theano_rng` is not provided.
    """
    if theano_rng is None:
        raise ValueError("theano_rng is required; it just defaults to None so that it may appear after layer_above / state_above in the list.")
    if state_above is not None:
        msg = layer_above.downward_message(state_above)
    else:
        msg = None
    if self.requires_reformat:
        state_below = self.input_space.format_as(state_below, self.desired_space)
    z = self.transformer.lmul(state_below) + self.b
    # Fixed: was `msg != None`. Identity comparison is required here --
    # `!=` on a Theano variable invokes __ne__ and would build a
    # symbolic expression instead of yielding a Python bool.
    if msg is not None:
        z = z + msg
    on_prob = T.nnet.sigmoid(2. * z)
    samples = theano_rng.binomial(p=on_prob, n=1, size=on_prob.shape,
                                  dtype=on_prob.dtype) * 2. - 1.
    return samples
def downward_message(self, downward_state):
    """Propagate a hidden state downward through the weights.

    Applies the transpose of the weight matrix to `downward_state`;
    if the upward pass had to reformat its input, the result is
    converted back into the original input space.
    """
    message = self.transformer.lmul_T(downward_state)
    if not self.requires_reformat:
        return message
    return self.desired_space.format_as(message, self.input_space)
def init_mf_state(self):
    """Initialize the mean-field state -- not yet implemented.

    Always raises; the statements below the raise are unreachable and
    were carried over from BVMP as a starting point for a real
    implementation.
    """
    raise NotImplementedError("This is just a copy-paste of BVMP")
    # work around theano bug with broadcasted vectors
    total_input = T.alloc(0., self.dbm.batch_size,
                          self.detector_layer_dim).astype(self.b.dtype) \
        + self.b.dimshuffle('x', 0)
    return max_pool_channels(z=total_input, pool_size=self.pool_size)
def make_state(self, num_examples, numpy_rng):
    """Return a shared variable holding an actual (sampled) state
    for this variable, as opposed to a mean field state.
    """
    # Threshold uniform noise against the per-unit "on" probability
    # sigmoid(2b), producing Ising samples in {-1., +1.}.
    uniform_draws = numpy_rng.uniform(0., 1., (num_examples, self.dim))
    activation_prob = sigmoid_numpy(2. * self.b.get_value())
    ising_sample = 2. * (uniform_draws < activation_prob) - 1.
    return sharedX(ising_sample, name='v_sample_shared')
def make_symbolic_state(self, num_examples, theano_rng):
mean = T.nnet.sigmoid(2. * self.b)
rval = theano_rng.binomial(size=(num_examples, self.nvis), p=mean)
rval = 2. * (rval) - 1.
示例2: RBM
# 需要导入模块: from pylearn2.linear.matrixmul import MatrixMul [as 别名]
# 或者: from pylearn2.linear.matrixmul.MatrixMul import lmul_T [as 别名]
#.........这里部分代码省略.........
training examples and the second indexing data dimensions.
Returns
-------
a : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input to each
hidden unit for each training example.
"""
if isinstance(v, tensor.Variable):
return self.bias_hid + self.transformer.lmul(v)
else:
return [self.input_to_h_from_v(vis) for vis in v]
def input_to_v_from_h(self, h):
    """
    Compute the affine function (linear map plus bias) that serves as
    input to the visible layer in an RBM.

    Parameters
    ----------
    h : tensor_like or list of tensor_likes
        Theano symbolic (or list thereof) representing one or several
        minibatches on the hidden units; first dimension indexes
        training examples, second indexes data dimensions.

    Returns
    -------
    a : tensor_like or list of tensor_likes
        Theano symbolic (or list thereof) giving the input to each
        visible unit for each row of h.
    """
    # A list of minibatches is handled by recursing element-wise.
    if not isinstance(h, tensor.Variable):
        return [self.input_to_v_from_h(hid) for hid in h]
    return self.bias_vis + self.transformer.lmul_T(h)
def upward_pass(self, v):
    """
    Thin delegation to `mean_h_given_v`; exists so the RBM exposes the
    interface expected by mlp.HiddenLayer when stacked in an MLP.
    """
    return self.mean_h_given_v(v)
def mean_h_given_v(self, v):
"""
Compute the mean activation of the hidden units given visible unit
configurations for a set of training examples.
Parameters
----------
v : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the hidden unit
states for a batch (or several) of training examples, with the
first dimension indexing training examples and the second indexing
data dimensions.
Returns
-------
h : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the mean
(deterministic) hidden unit activations given the visible units.
"""
if isinstance(v, tensor.Variable):
return nnet.sigmoid(self.input_to_h_from_v(v))
else:
示例3: BinaryVectorMaxPool
# 需要导入模块: from pylearn2.linear.matrixmul import MatrixMul [as 别名]
# 或者: from pylearn2.linear.matrixmul.MatrixMul import lmul_T [as 别名]
#.........这里部分代码省略.........
for s, t, c, e in safe_zip(state, target, coeff, eps):
assert all([isinstance(elem, float) for elem in [t, c, e]])
if c == 0.:
continue
m = s.mean(axis=0)
assert m.ndim == 1
rval += T.maximum(abs(m-t)-e,0.).mean()*c
return rval
def sample(self, state_below=None, state_above=None,
           layer_above=None, theano_rng=None):
    """Sample the pooled and detector units of this layer.

    Forms the total input W x + b from below, folds in any top-down
    message from `layer_above`, and delegates the actual pooled
    sampling to `max_pool_channels`.

    Returns
    -------
    (p_sample, h_sample) : tuple of tensor_likes
        Samples of the pooling units and the detector units.

    Raises
    ------
    ValueError
        If `theano_rng` is not provided.
    """
    if theano_rng is None:
        raise ValueError("theano_rng is required; it just defaults to None so that it may appear after layer_above / state_above in the list.")
    msg = None
    if state_above is not None:
        msg = layer_above.downward_message(state_above)
    if self.requires_reformat:
        state_below = self.input_space.format_as(state_below,
                                                 self.desired_space)
    total_input = self.transformer.lmul(state_below) + self.b
    _, _, p_sample, h_sample = max_pool_channels(total_input,
                                                 self.pool_size,
                                                 msg, theano_rng)
    return p_sample, h_sample
def downward_message(self, downward_state):
    """Send this layer's state down to the layer below.

    Multiplies `downward_state` by the transpose of the weights and,
    when the upward pass reformatted its input, maps the result back
    into the original input space.
    """
    below = self.transformer.lmul_T(downward_state)
    if self.requires_reformat:
        below = self.desired_space.format_as(below, self.input_space)
    return below
def make_state(self, num_examples, numpy_rng):
""" Returns a shared variable containing an actual state
(not a mean field state) for this variable.
"""
t1 = time.time()
empty_input = self.h_space.get_origin_batch(num_examples)
h_state = sharedX(empty_input)
default_z = T.zeros_like(h_state) + self.b
theano_rng = MRG_RandomStreams(numpy_rng.randint(2 ** 16))
p_exp, h_exp, p_sample, h_sample = max_pool_channels(
z = default_z,
pool_size = self.pool_size,
theano_rng = theano_rng)
assert h_sample.dtype == default_z.dtype
p_state = sharedX( self.output_space.get_origin_batch(
num_examples))
t2 = time.time()