This article collects typical usage examples of blocks.initialization.Uniform in Python. If you are wondering what Uniform is for, how to call it, or want to see it used in context, the curated code examples below may help. You can also read more about the module it lives in, blocks.initialization.
Four code examples of initialization.Uniform are shown below, ordered by popularity by default.
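Before the examples, a minimal sketch of what Uniform does may help: it is an initialization scheme that either samples an array directly via generate(rng, shape) or fills a brick's parameters when passed as weights_init/biases_init. The Linear brick and the dimensions below are illustrative choices, not taken from the examples that follow.

import numpy
from blocks.bricks import Linear
from blocks.initialization import Constant, Uniform

# Sample a 3x4 array uniformly from [-0.05, 0.05] (mean=0, total width=0.1).
rng = numpy.random.RandomState(1)
weights = Uniform(mean=0, width=0.1).generate(rng, (3, 4))

# Or attach the scheme to a brick; initialize() then fills W with it.
linear = Linear(input_dim=3, output_dim=4,
                weights_init=Uniform(width=0.1), biases_init=Constant(0))
linear.initialize()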
Example 1: test_uniform
# Required import: from blocks import initialization [as alias]
# Or: from blocks.initialization import Uniform [as alias]
import numpy
import theano
from numpy.testing import assert_allclose, assert_raises

from blocks.initialization import Uniform


def test_uniform():
    rng = numpy.random.RandomState(1)

    def check_uniform(rng, mean, width, std, shape):
        # Generate an array and verify its shape, dtype, mean and std.
        weights = Uniform(mean=mean, width=width,
                          std=std).generate(rng, shape)
        assert weights.shape == shape
        assert weights.dtype == theano.config.floatX
        assert_allclose(weights.mean(), mean, atol=1e-2)
        if width is not None:
            std_ = width / numpy.sqrt(12)
        else:
            std_ = std
        assert_allclose(std_, weights.std(), atol=1e-2)

    yield check_uniform, rng, 0, 0.05, None, (500, 600)
    yield check_uniform, rng, 0, None, 0.001, (600, 500)
    yield check_uniform, rng, 5, None, 0.004, (700, 300)

    # Specifying both width and std at once is invalid.
    assert_raises(ValueError, Uniform, 0, 1, 1)
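The test above relies on two properties worth stating explicitly: a uniform distribution of total width w has standard deviation w / sqrt(12), and Uniform accepts either width or std but not both (hence the final assert_raises). A small sanity check of the first property; the width 0.12 and the sample shape are arbitrary choices:

import numpy
from blocks.initialization import Uniform

rng = numpy.random.RandomState(1)
sample = Uniform(mean=0, width=0.12).generate(rng, (1000, 1000))
# The empirical std should be close to the theoretical 0.12 / sqrt(12) ~= 0.0346.
print(sample.std(), 0.12 / numpy.sqrt(12))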
Example 2: setUp
# Required import: from blocks import initialization [as alias]
# Or: from blocks.initialization import Uniform [as alias]
def setUp(self):
    self.readout = SoftmaxReadout(
        input_names=['states1', 'states2'],
        num_tokens=4, input_dims=[2, 3],
        weights_init=Uniform(width=1.0),
        biases_init=Uniform(width=1.0),
        seed=1)
    self.readout.initialize()
    self.states1 = numpy.array(
        [[[1., 2.]], [[2., 1.]]],
        dtype=theano.config.floatX)
    self.states2 = numpy.array(
        [[[3., 4., 5.]], [[5., 4., 3.]]],
        dtype=theano.config.floatX)
    self.merged = (
        self.states1.dot(self.readout.merge.children[0].W.get_value()) +
        self.states2.dot(self.readout.merge.children[1].W.get_value()) +
        self.readout.post_merge.parameters[0].get_value())
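In this setup, Uniform(width=1.0) with the default mean of 0 draws every weight and bias from [-0.5, 0.5]. SoftmaxReadout comes from blocks-extras, so the sketch below checks that bound on a plain Linear brick instead; the brick and its dimensions are stand-ins, not part of the test above.

import numpy
from blocks.bricks import Linear
from blocks.initialization import Uniform

brick = Linear(input_dim=2, output_dim=4,
               weights_init=Uniform(width=1.0), biases_init=Uniform(width=1.0))
brick.initialize()
# All sampled weights should lie within half a width of the mean.
assert numpy.all(numpy.abs(brick.W.get_value()) <= 0.5)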
Example 3: initialize
# Required import: from blocks import initialization [as alias]
# Or: from blocks.initialization import Uniform [as alias]
from blocks import initialization


def initialize(to_init):
    # Uniform weights in [-0.04, 0.04] and zero biases for every brick.
    for bricks in to_init:
        bricks.weights_init = initialization.Uniform(width=0.08)
        bricks.biases_init = initialization.Constant(0)
        bricks.initialize()
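A possible way to call this helper, with two hypothetical Linear bricks standing in for whatever model components need initializing:

from blocks.bricks import Linear

hidden = Linear(input_dim=50, output_dim=100, name='hidden')
output = Linear(input_dim=100, output_dim=10, name='output')
# Each brick gets Uniform(width=0.08) weights and zero biases, then is initialized.
initialize([hidden, output])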
Example 4: initialize_data_and_model
# Required import: from blocks import initialization [as alias]
# Or: from blocks.initialization import Uniform [as alias]
def initialize_data_and_model(config, train_phase, layout='dict'):
    c = config
    fuel_path = fuel.config.data_path[0]
    vocab_main = None
    vocab_keys = None
    if not c['encoder']:
        if not c['vocab_keys_path']:
            raise ValueError('Error: Should specify vocab_keys_path when no encoder')
        vocab_keys = Vocabulary(
            os.path.join(fuel.config.data_path[0], c['vocab_keys_path']))
    if c['vocab_path']:
        vocab_main = Vocabulary(
            os.path.join(fuel.config.data_path[0], c['vocab_path']))
    # TODO: change name of class LanguageModellingData... very ill-named.
    data = LanguageModellingData(c['data_path'], layout, vocab=vocab_main)
    vocab_main = data.vocab
    model = Seq2Seq(c['emb_dim'], c['dim'], c['num_input_words'],
                    c['num_output_words'], data.vocab,
                    proximity_coef=c['proximity_coef'],
                    proximity_distance=c['proximity_distance'],
                    encoder=c['encoder'],
                    decoder=c['decoder'],
                    shared_rnn=c['shared_rnn'],
                    translate_layer=c['translate_layer'],
                    word_dropout=c['word_dropout'],
                    tied_in_out=c['tied_in_out'],
                    vocab_keys=vocab_keys,
                    reconstruction_coef=c['reconstruction_coef'],
                    provide_targets=c['provide_targets'],
                    weights_init=Uniform(width=0.1),
                    biases_init=Constant(0.))
    model.initialize()
    if c['embedding_path'] and ((train_phase or c['freeze_pretrained']) or
                                c['provide_targets']):
        if c['provide_targets'] and c['freeze_pretrained']:
            raise ValueError("Can't provide_targets and use freeze_pretrained. "
                             "In that case, simply use freeze_pretrained")
        # If encoder embeddings are frozen, they are not saved with the
        # model's parameters, so load them here.
        emb_full_path = os.path.join(fuel_path, c['embedding_path'])
        embedding_matrix = numpy.load(emb_full_path)
        if c['provide_targets']:
            model.set_def_embeddings(embedding_matrix, 'target')
            logger.debug("Pre-trained targets loaded")
        else:
            model.set_def_embeddings(embedding_matrix, 'main')
            logger.debug("Pre-trained encoder embeddings loaded")
    return data, model
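Seq2Seq, LanguageModellingData and Vocabulary are project-specific classes, but the initialization pattern itself (Uniform weights, Constant biases, then initialize()) carries over to standard Blocks bricks. A minimal sketch with an MLP whose activations and dimensions are purely illustrative:

from blocks.bricks import MLP, Tanh
from blocks.initialization import Constant, Uniform

mlp = MLP(activations=[Tanh(), Tanh()], dims=[100, 50, 10],
          weights_init=Uniform(width=0.1), biases_init=Constant(0.))
# initialize() pushes the schemes down to the MLP's linear layers and fills them.
mlp.initialize()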