This article collects typical usage examples of the Python method blocks.bricks.NDimensionalSoftmax.log_probabilities. If you have been wondering how to use NDimensionalSoftmax.log_probabilities, or what it looks like in practice, the curated code examples below should help; you can also explore the containing class blocks.bricks.NDimensionalSoftmax for further usage.
Three code examples of NDimensionalSoftmax.log_probabilities are shown below, sorted by popularity by default.
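All three examples share one call pattern: a log-softmax over the last axis of an N-dimensional tensor, with every leading axis beyond the usual (batch, vocab) pair declared via extra_ndim. A minimal sketch of that pattern, using hypothetical variable names:

from theano import tensor
from blocks.bricks import NDimensionalSoftmax

softmax = NDimensionalSoftmax()
x = tensor.tensor3('x')  # e.g. (time, batch, vocab) energies
# Log-softmax over the last axis; the extra leading time axis is
# declared through `extra_ndim` so the brick can flatten and restore it.
log_probs = softmax.log_probabilities(x, extra_ndim=x.ndim - 2)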
Example 1: ShallowFusionReadout
# Required import: from blocks.bricks import NDimensionalSoftmax
# Or: from blocks.bricks.NDimensionalSoftmax import log_probabilities
from blocks.bricks import NDimensionalSoftmax
from blocks.bricks.base import application
from blocks.bricks.sequence_generators import Readout


class ShallowFusionReadout(Readout):
    def __init__(self, lm_costs_name, lm_weight,
                 normalize_am_weights=False,
                 normalize_lm_weights=False,
                 normalize_tot_weights=True,
                 am_beta=1.0,
                 **kwargs):
        super(ShallowFusionReadout, self).__init__(**kwargs)
        self.lm_costs_name = lm_costs_name
        self.lm_weight = lm_weight
        self.normalize_am_weights = normalize_am_weights
        self.normalize_lm_weights = normalize_lm_weights
        self.normalize_tot_weights = normalize_tot_weights
        self.am_beta = am_beta
        self.softmax = NDimensionalSoftmax()
        self.children += [self.softmax]

    @application
    def readout(self, **kwargs):
        # Language-model costs are negative log-probabilities;
        # negate them to recover log-probabilities.
        lm_costs = -kwargs.pop(self.lm_costs_name)
        if self.normalize_lm_weights:
            lm_costs = self.softmax.log_probabilities(
                lm_costs, extra_ndim=lm_costs.ndim - 2)
        # Scale the acoustic-model readout before fusion.
        am_pre_softmax = self.am_beta * super(
            ShallowFusionReadout, self).readout(**kwargs)
        if self.normalize_am_weights:
            am_pre_softmax = self.softmax.log_probabilities(
                am_pre_softmax, extra_ndim=am_pre_softmax.ndim - 2)
        x = am_pre_softmax + self.lm_weight * lm_costs
        if self.normalize_tot_weights:
            x = self.softmax.log_probabilities(x, extra_ndim=x.ndim - 2)
        return x
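The readout above reduces to a weighted sum of two log-score streams followed by an optional renormalization. A minimal sketch of that fusion rule on plain Theano matrices, with hypothetical names and a hypothetical weight:

from theano import tensor
from blocks.bricks import NDimensionalSoftmax

softmax = NDimensionalSoftmax()
am_scores = tensor.matrix('am_scores')        # (batch, vocab) acoustic-model scores
lm_log_probs = tensor.matrix('lm_log_probs')  # (batch, vocab) LM log-probabilities
lm_weight = 0.5                               # hypothetical fusion weight

# Weighted sum of the two streams, renormalized so the result
# is again a proper log-distribution over the vocabulary.
fused = am_scores + lm_weight * lm_log_probs
fused = softmax.log_probabilities(fused, extra_ndim=fused.ndim - 2)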
Example 2: SoftmaxEmitter
# Required import: from blocks.bricks import NDimensionalSoftmax
# Or: from blocks.bricks.NDimensionalSoftmax import log_probabilities
from theano import tensor
from blocks.bricks import Initializable, NDimensionalSoftmax, Random
from blocks.bricks.base import application
from blocks.bricks.sequence_generators import AbstractEmitter


class SoftmaxEmitter(AbstractEmitter, Initializable, Random):
    """A softmax emitter for the case of integer outputs.

    Interprets readout elements as energies corresponding to their indices.

    Parameters
    ----------
    initial_output : int or a scalar :class:`~theano.Variable`
        The initial output.

    """
    def __init__(self, initial_output=0, **kwargs):
        super(SoftmaxEmitter, self).__init__(**kwargs)
        self.initial_output = initial_output
        self.softmax = NDimensionalSoftmax()
        self.children = [self.softmax]

    @application
    def probs(self, readouts):
        return self.softmax.apply(readouts, extra_ndim=readouts.ndim - 2)

    @application
    def emit(self, readouts):
        # Sample one index per batch element from the softmax distribution.
        probs = self.probs(readouts)
        batch_size = probs.shape[0]
        pvals_flat = probs.reshape((batch_size, -1))
        generated = self.theano_rng.multinomial(pvals=pvals_flat)
        return generated.reshape(probs.shape).argmax(axis=-1)

    @application
    def cost(self, readouts, outputs):
        # WARNING: unfortunately this application method works
        # just fine when `readouts` and `outputs` have
        # different dimensions. Be careful!
        return self.softmax.categorical_cross_entropy(
            outputs, readouts, extra_ndim=readouts.ndim - 2)

    @application
    def costs(self, readouts):
        return -self.softmax.log_probabilities(
            readouts, extra_ndim=readouts.ndim - 2)

    @application
    def initial_outputs(self, batch_size):
        return self.initial_output * tensor.ones((batch_size,), dtype='int64')

    def get_dim(self, name):
        if name == 'outputs':
            return 0
        return super(SoftmaxEmitter, self).get_dim(name)
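For integer targets, the cost and costs applications are two views of the same quantity: costs returns the full table of negative log-probabilities, while cost indexes that table at the target labels via the cross-entropy. A sketch of the relation, assuming hypothetical (time, batch, vocab)-shaped readouts:

from theano import tensor
from blocks.bricks import NDimensionalSoftmax

softmax = NDimensionalSoftmax()
readouts = tensor.tensor3('readouts')  # (time, batch, vocab) energies
outputs = tensor.lmatrix('outputs')    # (time, batch) integer targets

# Full table of negative log-probabilities for every index...
all_costs = -softmax.log_probabilities(readouts, extra_ndim=readouts.ndim - 2)
# ...versus the entries picked out at the target indices.
target_cost = softmax.categorical_cross_entropy(
    outputs, readouts, extra_ndim=readouts.ndim - 2)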
Example 3: softmax_layer
# Required import: from blocks.bricks import NDimensionalSoftmax
# Or: from blocks.bricks.NDimensionalSoftmax import log_probabilities
def softmax_layer(self, h, y):
    """Perform a softmax over the hidden states in order to predict
    the next word in the sequence, and compute the loss.

    :param h: The hidden state sequence
    :param y: The target words
    """
    hidden_to_output = Linear(name='hidden_to_output',
                              input_dim=self.hidden_size,
                              output_dim=self.vocab_size)
    # `initialize` is a project-local helper that sets the weight scale.
    initialize(hidden_to_output,
               sqrt(6.0 / (self.hidden_size + self.vocab_size)))
    linear_output = hidden_to_output.apply(h)
    linear_output.name = 'linear_output'
    softmax = NDimensionalSoftmax(name="lm_softmax")
    # `extra_ndim=1` marks the leading time axis of the 3-D input.
    y_hat = softmax.log_probabilities(linear_output, extra_ndim=1)
    y_hat.name = 'y_hat'
    cost = softmax.categorical_cross_entropy(y, linear_output,
                                             extra_ndim=1).mean()
    cost.name = 'cost'
    return y_hat, cost
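To make the layer self-contained, here is the same computation as a standalone sketch with hypothetical sizes, and with standard Blocks initializers (IsotropicGaussian, Constant) standing in for the project-local initialize helper:

from numpy import sqrt
from theano import tensor
from blocks.bricks import Linear, NDimensionalSoftmax
from blocks.initialization import Constant, IsotropicGaussian

hidden_size, vocab_size = 256, 10000  # hypothetical sizes
h = tensor.tensor3('h')   # (time, batch, hidden_size) hidden states
y = tensor.lmatrix('y')   # (time, batch) target word indices

hidden_to_output = Linear(
    name='hidden_to_output', input_dim=hidden_size, output_dim=vocab_size,
    weights_init=IsotropicGaussian(sqrt(6.0 / (hidden_size + vocab_size))),
    biases_init=Constant(0))
hidden_to_output.initialize()

linear_output = hidden_to_output.apply(h)
softmax = NDimensionalSoftmax(name="lm_softmax")
y_hat = softmax.log_probabilities(linear_output, extra_ndim=1)
cost = softmax.categorical_cross_entropy(y, linear_output, extra_ndim=1).mean()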