This page collects typical usage examples of the Python method tensorflow_fold.FromTensor. If you are unsure what exactly tensorflow_fold.FromTensor does, how to call it, or where it is used, the curated code examples below may help; you can also explore further usage examples from the tensorflow_fold module itself.
The following presents 5 code examples of the tensorflow_fold.FromTensor method, ordered by popularity by default.
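Before the project-specific examples, here is a minimal self-contained sketch of the basic idiom (a toy of my own, not taken from the examples below; the data and block names are made up): td.FromTensor wraps an existing tf.Tensor, such as a constant or a shared parameter, as an input-less Fold block, so that it can be read inside a td.Composition alongside data-dependent blocks, and td.Compiler is the standard way to run the resulting block over a batch of Python inputs.

import tensorflow as tf
import tensorflow_fold as td

bias = tf.constant([1.0, 2.0, 3.0])

blk = td.Composition()
with blk.scope():
    # convert the incoming Python list into a length-3 vector tensor
    vec = td.Vector(3).reads(blk.input)
    # td.FromTensor lets the plain tf.Tensor `bias` be read like any other block
    blk.output.reads(td.Function(tf.add).reads(vec, td.FromTensor(bias)))

compiler = td.Compiler.create(blk)
out, = compiler.output_tensors
with tf.Session() as sess:
    fd = compiler.build_feed_dict([[0.5, 0.5, 0.5], [1.0, 1.0, 1.0]])
    print(sess.run(out, feed_dict=fd))  # -> [[1.5 2.5 3.5] [2. 3. 4.]]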
Example 1: coding_blk
# Required import: import tensorflow_fold as td
# Or: from tensorflow_fold import FromTensor
def coding_blk():
    """Input: node dict
    Output: TensorType([1, hyper.word_dim])
    """
    Wcomb1 = param.get('Wcomb1')
    Wcomb2 = param.get('Wcomb2')

    blk = td.Composition()
    with blk.scope():
        direct = embedding.direct_embed_blk().reads(blk.input)
        composed = embedding.composed_embed_blk().reads(blk.input)

        Wcomb1 = td.FromTensor(param.get('Wcomb1'))
        Wcomb2 = td.FromTensor(param.get('Wcomb2'))

        direct = td.Function(embedding.batch_mul).reads(direct, Wcomb1)
        composed = td.Function(embedding.batch_mul).reads(composed, Wcomb2)

        added = td.Function(tf.add).reads(direct, composed)
        blk.output.reads(added)
    return blk
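Here Wcomb1 and Wcomb2 are fetched with param.get, presumably shared model parameters (plain tf.Tensor / tf.Variable objects); wrapping them in td.FromTensor is what allows them to be read inside the td.Composition next to the data-dependent embedding blocks.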
Example 2: continous_weighted_add_blk
# Required import: import tensorflow_fold as td
# Or: from tensorflow_fold import FromTensor
def continous_weighted_add_blk():
    block = td.Composition(name='continous_weighted_add')
    with block.scope():
        # td.Fold feeds this block (accumulator, element) pairs: the accumulator is
        # the (partial feature sum, child index) tuple and the element is the
        # (child feature, child clen, parent clen) triple (see Example 5 below)
        initial = td.GetItem(0).reads(block.input)
        cur = td.GetItem(1).reads(block.input)

        last = td.GetItem(0).reads(initial)
        idx = td.GetItem(1).reads(initial)

        cur_fea = td.GetItem(0).reads(cur)
        cur_clen = td.GetItem(1).reads(cur)
        pclen = td.GetItem(2).reads(cur)

        Wi = linear_combine_blk().reads(cur_clen, pclen, idx)
        weighted_fea = td.Function(batch_mul).reads(cur_fea, Wi)

        # output the updated accumulator: (partial sum + weighted feature, child index + 1)
        block.output.reads(
            td.Function(tf.add, name='add_last_weighted_fea').reads(last, weighted_fea),
            # XXX: rewrite using tf.range
            td.Function(tf.add, name='add_idx_1').reads(idx, td.FromTensor(tf.constant(1.)))
        )
    return block
Example 3: l2loss_blk
# Required import: import tensorflow_fold as td
# Or: from tensorflow_fold import FromTensor
def l2loss_blk():
    # rewrite using metric
    leaf_case = td.Composition()
    with leaf_case.scope():
        leaf_case.output.reads(td.FromTensor(tf.constant(1.)))

    nonleaf_case = td.Composition()
    with nonleaf_case.scope():
        direct = direct_embed_blk().reads(nonleaf_case.input)
        com = composed_embed_blk().reads(nonleaf_case.input)
        loss = td.Function(batch_nn_l2loss).reads(direct, com)
        nonleaf_case.output.reads(loss)

    return td.OneOf(lambda node: node['clen'] != 0,
                    {False: leaf_case, True: nonleaf_case})
# generalize to tree_reduce, which accepts one block that takes two nodes and returns a value
Example 4: feature_detector_blk
# Required import: import tensorflow_fold as td
# Or: from tensorflow_fold import FromTensor
def feature_detector_blk(max_depth=2):
    """Input: node dict
    Output: TensorType([hyper.conv_dim, ])
    Single patch of the conv. Depth is max_depth
    """
    blk = td.Composition()
    with blk.scope():
        nodes_in_patch = collect_node_for_conv_patch_blk(max_depth=max_depth).reads(blk.input)

        # map from python object to tensors
        mapped = td.Map(td.Record((coding_blk(), td.Scalar(), td.Scalar(),
                                   td.Scalar(), td.Scalar()))).reads(nodes_in_patch)
        # mapped = [(feature, idx, depth, max_depth), (...)]
        # compute weighted feature for each elem
        weighted = td.Map(weighted_feature_blk()).reads(mapped)
        # weighted = [fea, fea, fea, ...]
        # add together
        added = td.Reduce(td.Function(tf.add)).reads(weighted)
        # added = TensorType([hyper.conv_dim, ])
        # add bias
        biased = td.Function(tf.add).reads(added, td.FromTensor(param.get('Bconv')))
        # biased = TensorType([hyper.conv_dim, ])
        # tanh
        tanh = td.Function(tf.nn.tanh).reads(biased)
        # tanh = TensorType([hyper.conv_dim, ])
        blk.output.reads(tanh)
    return blk
# generalize to tree_fold, which accepts one block that takes two nodes and returns a value
Example 5: composed_embed_blk
# Required import: import tensorflow_fold as td
# Or: from tensorflow_fold import FromTensor
def composed_embed_blk():
    leaf_case = direct_embed_blk()

    nonleaf_case = td.Composition(name='composed_embed_nonleaf')
    with nonleaf_case.scope():
        children = td.GetItem('children').reads(nonleaf_case.input)
        clen = td.Scalar().reads(td.GetItem('clen').reads(nonleaf_case.input))
        cclens = td.Map(td.GetItem('clen') >> td.Scalar()).reads(children)
        fchildren = td.Map(direct_embed_blk()).reads(children)

        initial_state = td.Composition()
        with initial_state.scope():
            initial_state.output.reads(
                td.FromTensor(tf.zeros(hyper.word_dim)),
                td.FromTensor(tf.zeros([])),
            )
        summed = td.Zip().reads(fchildren, cclens, td.Broadcast().reads(clen))
        summed = td.Fold(continous_weighted_add_blk(), initial_state).reads(summed)[0]
        added = td.Function(tf.add, name='add_bias').reads(summed, td.FromTensor(param.get('B')))
        normed = clip_by_norm_blk().reads(added)

        act_fn = tf.nn.relu if hyper.use_relu else tf.nn.tanh
        relu = td.Function(act_fn).reads(normed)
        nonleaf_case.output.reads(relu)

    return td.OneOf(lambda node: node['clen'] == 0,
                    {True: leaf_case, False: nonleaf_case})
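The Zip/Fold wiring in Example 5 is the hardest part to read out of context, so here is a small self-contained sketch of the same pattern (a toy of my own, not project code; names such as toy_weighted_add, feas and weights are made up): a fold step built with td.Composition, an initial accumulator injected with td.FromTensor, and td.Compiler to run it. It computes a weighted sum over a list of scalars.

import tensorflow as tf
import tensorflow_fold as td

# fold step: reads the (accumulator, element) pair supplied by td.Fold and
# returns the new accumulator (here simply a running weighted sum)
step = td.Composition(name='toy_weighted_add')
with step.scope():
    acc = td.GetItem(0).reads(step.input)
    elem = td.GetItem(1).reads(step.input)
    val = td.GetItem(0).reads(elem)
    w = td.GetItem(1).reads(elem)
    weighted = td.Function(tf.multiply).reads(val, w)
    step.output.reads(td.Function(tf.add).reads(acc, weighted))

toy = td.Composition(name='toy_weighted_sum')
with toy.scope():
    feas = td.Map(td.Scalar()).reads(td.GetItem('feas').reads(toy.input))
    ws = td.Map(td.Scalar()).reads(td.GetItem('weights').reads(toy.input))
    zipped = td.Zip().reads(feas, ws)
    # the initial accumulator is injected with td.FromTensor, as in Example 5
    toy.output.reads(td.Fold(step, td.FromTensor(tf.zeros([]))).reads(zipped))

compiler = td.Compiler.create(toy)
out, = compiler.output_tensors
with tf.Session() as sess:
    fd = compiler.build_feed_dict([{'feas': [1.0, 2.0, 3.0],
                                    'weights': [0.5, 0.25, 0.25]}])
    print(sess.run(out, feed_dict=fd))  # -> [1.75]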