This page collects typical usage examples of the Python method sonnet.Embed: what snt.Embed does, how to call it, and how it is used in practice. The hand-picked examples below may help; you can also look further into how the enclosing sonnet module is used.
Six code examples of sonnet.Embed are presented below, drawn from open-source projects and ordered by popularity by default.
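Before the examples, here is a minimal self-contained sketch of how snt.Embed is typically called (Sonnet 1.x with TensorFlow 1.x is assumed; the vocabulary size, embedding dimension, and IDs are illustrative):

import sonnet as snt
import tensorflow as tf

# An Embed module owns a trainable [vocab_size, embed_dim] embedding table.
embed = snt.Embed(vocab_size=1000, embed_dim=32, name='example_embed')

# Calling the module looks up rows by integer ID: [batch] -> [batch, embed_dim].
ids = tf.constant([3, 17, 256])
vectors = embed(ids)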
Example 1: _construct_weights
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def _construct_weights(self):
    """Constructs the user/item memories and user/item external memory/outputs.

    Also adds the embedding lookups.
    """
    self.user_memory = snt.Embed(self.config.user_count, self.config.embed_size,
                                 initializers=self._embedding_initializers,
                                 regularizers=self._embedding_regularizers,
                                 name='MemoryEmbed')
    self.item_memory = snt.Embed(self.config.item_count,
                                 self.config.embed_size,
                                 initializers=self._embedding_initializers,
                                 regularizers=self._embedding_regularizers,
                                 name="ItemMemory")
    # [batch, embedding size]
    self._cur_user = self.user_memory(self.input_users)
    # Item memories as a query
    self._cur_item = self.item_memory(self.input_items)
    self._cur_item_negative = self.item_memory(self.input_items_negative)
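The initializers and regularizers arguments passed above are dictionaries keyed by the module's variable name, which for snt.Embed is 'embeddings'. A plausible configuration, presumably set in the class constructor (the specific values are assumptions, not taken from the original project):

# Hypothetical settings; the original project's values are not shown above.
self._embedding_initializers = {
    'embeddings': tf.truncated_normal_initializer(stddev=0.01),
}
self._embedding_regularizers = {
    'embeddings': tf.contrib.layers.l2_regularizer(scale=1e-4),
}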
Example 2: _instruction
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def _instruction(self, instruction):
    # Split the instruction string into tokens.
    splitted = tf.string_split(instruction)
    dense = tf.sparse_tensor_to_dense(splitted, default_value='')
    length = tf.reduce_sum(tf.to_int32(tf.not_equal(dense, '')), axis=1)
    # Map tokens to int64 hash buckets. Small risk of having collisions;
    # alternatively, a vocabulary can be used.
    num_hash_buckets = 1000
    buckets = tf.string_to_hash_bucket_fast(dense, num_hash_buckets)
    # Embed the instruction. An embedding size of 20 seems to be enough.
    embedding_size = 20
    embedding = snt.Embed(num_hash_buckets, embedding_size)(buckets)
    # Pad to make sure there is at least one output.
    padding = tf.to_int32(tf.equal(tf.shape(embedding)[1], 0))
    embedding = tf.pad(embedding, [[0, 0], [0, padding], [0, 0]])
    core = tf.contrib.rnn.LSTMBlockCell(64, name='language_lstm')
    output, _ = tf.nn.dynamic_rnn(core, embedding, length, dtype=tf.float32)
    # Return the last output.
    return tf.reverse_sequence(output, length, seq_axis=1)[:, 0]
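The same hashing-trick lookup can be sketched outside the class. This standalone fragment (assuming the imports shown above; the instruction string is illustrative) makes the shapes concrete:

instruction = tf.constant(['go to the red door'])
tokens = tf.sparse_tensor_to_dense(tf.string_split(instruction), default_value='')
buckets = tf.string_to_hash_bucket_fast(tokens, 1000)  # [batch, time] int64 IDs
embedded = snt.Embed(1000, 20)(buckets)                # [batch, time, 20]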
Example 3: _make_encoder
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def _make_encoder(self):
    """Constructs an encoding for a single character ID."""
    embed = snt.Embed(
        vocab_size=self.hparams.vocab_size + self.hparams.oov_buckets,
        embed_dim=self.hparams.embed_size)
    mlp = codec_mod.MLPObsEncoder(self.hparams)
    return codec_mod.EncoderSequence([embed, mlp], name="obs_encoder")
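codec_mod here is a project-specific module, so this example is not runnable on its own. A rough equivalent of the same composition using only stock Sonnet modules might look like the following (the vocabulary size and MLP layer sizes are made up for illustration):

embed = snt.Embed(vocab_size=256 + 10, embed_dim=32)
mlp = snt.nets.MLP(output_sizes=[64, 64])
encoder = snt.Sequential([embed, mlp], name='obs_encoder')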
Example 4: _build
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def _build(self, attribute_value):
    # Cast the (possibly float) attribute values to integer category IDs.
    int_attribute_value = tf.cast(attribute_value, dtype=tf.int32)
    tf.summary.histogram('cat_attribute_value_histogram', int_attribute_value)
    embedding = snt.Embed(self._num_categories,
                          self._attr_embedding_dim)(int_attribute_value)
    tf.summary.histogram('cat_embedding_histogram', embedding)
    return tf.squeeze(embedding, axis=1)
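Because of the final tf.squeeze(..., axis=1), the module expects one categorical attribute per example, i.e. input of shape [batch, 1]. A standalone sketch of the same operation (the dimensions and IDs are hypothetical):

# Hypothetical dimensions for illustration.
num_categories, attr_embedding_dim = 10, 8
int_ids = tf.constant([[2], [5], [0]])                        # [batch, 1] category IDs
emb = snt.Embed(num_categories, attr_embedding_dim)(int_ids)  # [batch, 1, dim]
out = tf.squeeze(emb, axis=1)                                 # [batch, dim]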
Example 5: embed_type
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def embed_type(features, num_types, type_embedding_dim):
    # Column 0 is a preexistence flag, passed through as a raw float feature.
    preexistance_feat = tf.expand_dims(tf.cast(features[:, 0], dtype=tf.float32), axis=1)
    # Column 1 is an integer type ID, embedded and then layer-normalised.
    type_embedder = snt.Embed(num_types, type_embedding_dim)
    norm = snt.LayerNorm()
    type_embedding = norm(type_embedder(tf.cast(features[:, 1], tf.int32)))
    tf.summary.histogram('type_embedding_histogram', type_embedding)
    return tf.concat([preexistance_feat, type_embedding], axis=1)
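A hypothetical call, to make the expected feature layout concrete (column 0 is the preexistence flag, column 1 the integer type ID; the values are illustrative):

features = tf.constant([[1, 3],
                        [0, 7]], dtype=tf.int64)  # [batch, 2]
out = embed_type(features, num_types=10, type_embedding_dim=5)
# out has shape [batch, 1 + 5]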
Example 6: _construct_weights
# Required import: import sonnet as snt
# Or: from sonnet import Embed

def _construct_weights(self):
    """Constructs the user/item memories and user/item external memory/outputs.

    Also adds the embedding lookups.
    """
    self.user_memory = snt.Embed(self.config.user_count, self.config.embed_size,
                                 initializers=self._embedding_initializers,
                                 name='MemoryEmbed')
    self.user_output = snt.Embed(self.config.user_count, self.config.embed_size,
                                 initializers=self._embedding_initializers,
                                 name='MemoryOutput')
    self.item_memory = snt.Embed(self.config.item_count,
                                 self.config.embed_size,
                                 initializers=self._embedding_initializers,
                                 name="ItemMemory")
    self._mem_layer = VariableLengthMemoryLayer(self.config.hops,
                                                self.config.embed_size,
                                                tf.nn.relu,
                                                initializers=self._hops_init,
                                                regularizers=self._regularizers,
                                                name='UserMemoryLayer')
    self._output_module = snt.Sequential([
        DenseLayer(self.config.embed_size, True, tf.nn.relu,
                   initializers=self._initializers,
                   regularizers=self._regularizers,
                   name='Layer'),
        snt.Linear(1, False,
                   initializers=self._output_initializers,
                   regularizers=self._regularizers,
                   name='OutputVector'),
        tf.squeeze])
    # [batch, embedding size]
    self._cur_user = self.user_memory(self.input_users)
    self._cur_user_output = self.user_output(self.input_users)
    # Item memories as a query
    self._cur_item = self.item_memory(self.input_items)
    self._cur_item_negative = self.item_memory(self.input_items_negative)
    # Share embeddings between the memory and output views of the items
    self._cur_item_output = self._cur_item
    self._cur_item_output_negative = self._cur_item_negative
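Two design points are worth noting in this variant: snt.Sequential composes arbitrary callables, which is why a plain tf.squeeze can terminate the output module, and the item output embeddings are aliased to the item memory embeddings rather than created as a separate snt.Embed, so users get distinct memory/output tables while items share one.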