This article collects typical usage examples of the Python method chainer.functions.hstack. If you are wondering exactly what functions.hstack does and how to use it, the curated code examples below should help; you can also explore the other methods of the chainer.functions module.
Shown below are 9 code examples of functions.hstack, sorted by popularity.
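Before diving into the examples, here is a minimal sketch (not taken from any of the snippets below; the variable names are illustrative) of what F.hstack does: it concatenates a sequence of arrays or Variables along the second axis, or along the first axis for 1-D inputs, mirroring numpy.hstack.

import numpy as np
import chainer.functions as F

a = np.arange(6, dtype=np.float32).reshape(2, 3)
b = np.arange(4, dtype=np.float32).reshape(2, 2)
y = F.hstack((a, b))   # concatenate along axis 1
print(y.shape)         # -> (2, 5)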
Example 1: forward
# Required import:
import chainer.functions as F
def forward(self, x, y):
y1 = F.hstack((x, y))
return y1
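As a rough illustration of how a forward like this might be exercised, here is a hedged sketch; the enclosing chainer.Chain is not part of the snippet above, so the wrapper class below is hypothetical.

import numpy as np
import chainer
import chainer.functions as F

class HStackModel(chainer.Chain):  # hypothetical wrapper, not from the source
    def forward(self, x, y):
        y1 = F.hstack((x, y))
        return y1

model = HStackModel()
x = np.ones((2, 3), dtype=np.float32)
y = np.zeros((2, 2), dtype=np.float32)
out = model(x, y)  # chainer.Variable with shape (2, 5)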
Example 2: forward
# Required import:
from chainer import functions
def forward(self, inputs, device):
y = functions.hstack(inputs)
return y,
Example 3: forward_expected
# Required import (this reference implementation uses NumPy directly):
import numpy
def forward_expected(self, inputs):
y = numpy.hstack(inputs)
return y,
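Examples 2 and 3 are the two halves of a Chainer function test: forward runs functions.hstack on the device under test, while forward_expected computes the NumPy reference. A hedged sketch of how they might sit inside a chainer.testing.FunctionTestCase follows; the class name, decorator parameters, and input shapes are assumptions, not taken from the original test file.

import numpy
from chainer import functions, testing


@testing.inject_backend_tests(None, [{}])  # CPU backend only in this sketch
class TestHstackSketch(testing.FunctionTestCase):

    def generate_inputs(self):
        x0 = numpy.random.rand(2, 3).astype(numpy.float32)
        x1 = numpy.random.rand(2, 4).astype(numpy.float32)
        return x0, x1

    def forward(self, inputs, device):
        y = functions.hstack(inputs)
        return y,

    def forward_expected(self, inputs):
        y = numpy.hstack(inputs)
        return y,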
Example 4: check_value_check
# Required imports:
from chainer import functions
from chainer.utils import type_check
def check_value_check(self):
if self.valid:
        # Valid inputs must pass the type check without raising
functions.hstack(self.xs)
else:
with self.assertRaises(type_check.InvalidType):
functions.hstack(self.xs)
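For context, here is a hedged illustration of the kind of invalid input this check guards against: functions.hstack requires all inputs to share the same number of dimensions, so mixing a 2-D and a 1-D array fails the type check (assuming type checking is enabled, which is Chainer's default).

import numpy as np
from chainer import functions
from chainer.utils import type_check

try:
    functions.hstack([np.zeros((2, 3), dtype=np.float32),
                      np.zeros((2,), dtype=np.float32)])
except type_check.InvalidType as e:
    print("rejected as expected:", e)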
Example 5: flatten_graph_data
# Required import:
import chainer.functions as F
def flatten_graph_data(adj, x):
    return F.hstack((F.reshape(adj, [adj.shape[0], -1]),
                     F.reshape(x, [x.shape[0], -1])))
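A hypothetical call with made-up batch and graph sizes, only to show the resulting shape:

import numpy as np

adj = np.zeros((4, 5, 5), dtype=np.float32)  # 4 graphs, 5x5 adjacency each
x = np.zeros((4, 5, 8), dtype=np.float32)    # 4 graphs, 5 nodes, 8 features
flat = flatten_graph_data(adj, x)            # shape (4, 5*5 + 5*8) = (4, 65)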
Example 6: greedy_actions
# Required import:
import chainer.functions as F
def greedy_actions(self):
actions = []
for branch in self.branches:
actions.append(branch.q_values.array.argmax(axis=1).reshape(-1, 1))
return F.hstack(actions)
Example 7: max
# Required import:
import chainer.functions as F
def max(self):
chosen_q_values = []
for branch in self.branches:
chosen_q_values.append(branch.max.reshape(-1, 1))
return F.hstack(chosen_q_values)
Example 8: evaluate_actions
# Required import:
import chainer.functions as F
def evaluate_actions(self, actions):
branch_q_values = []
for i, branch in enumerate(self.branches):
branch_actions = actions[:, i]
branch_q_values.append(branch.evaluate_actions(
branch_actions).reshape(-1, 1))
return F.hstack(branch_q_values)
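Examples 6 to 8 read like methods of a branched action-value container that keeps one Q-value head per action dimension. The stand-in branch class below is hypothetical and only illustrates the per-branch interface those methods rely on (q_values, max, evaluate_actions); the real code presumably uses something like ChainerRL's DiscreteActionValue.

import numpy as np
import chainer
import chainer.functions as F

class FakeBranch:
    """Minimal stand-in exposing the attributes used in Examples 6-8."""
    def __init__(self, q):
        self.q_values = chainer.Variable(q)           # (batch, n_actions)

    @property
    def max(self):
        return F.max(self.q_values, axis=1)           # (batch,)

    def evaluate_actions(self, actions):
        return F.select_item(self.q_values, actions)  # (batch,)

branches = [FakeBranch(np.random.randn(2, 3).astype(np.float32)),
            FakeBranch(np.random.randn(2, 4).astype(np.float32))]
# With such branches, greedy_actions / max / evaluate_actions each build a
# (batch, n_branches) result by F.hstack-ing one (batch, 1) column per branch.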
Example 9: calculate_all_attentions
# Required imports:
import six
import chainer.functions as F
def calculate_all_attentions(self, hs, ys):
"""Calculate all of attentions.
Args:
hs (list of chainer.Variable | N-dimensional array):
Input variable from encoder.
ys (list of chainer.Variable | N-dimensional array):
Input variable of decoder.
Returns:
chainer.Variable: List of attention weights.
"""
# prepare input and output word sequences with sos/eos IDs
eos = self.xp.array([self.eos], "i")
sos = self.xp.array([self.sos], "i")
ys_in = [F.concat([sos, y], axis=0) for y in ys]
ys_out = [F.concat([y, eos], axis=0) for y in ys]
    # pad ys_in with eos and ys_out with -1 (ignored by the loss)
    # pad_ys_in / pad_ys_out: utt x olen
pad_ys_in = F.pad_sequence(ys_in, padding=self.eos)
pad_ys_out = F.pad_sequence(ys_out, padding=-1)
# get length info
olength = pad_ys_out.shape[1]
# initialization
c_list = [None] # list of cell state of each layer
z_list = [None] # list of hidden state of each layer
for _ in six.moves.range(1, self.dlayers):
c_list.append(None)
z_list.append(None)
att_w = None
att_ws = []
self.att.reset() # reset pre-computation of h
# pre-computation of embedding
eys = self.embed(pad_ys_in) # utt x olen x zdim
eys = F.separate(eys, axis=1)
# loop for an output sequence
for i in six.moves.range(olength):
att_c, att_w = self.att(hs, z_list[0], att_w)
ey = F.hstack((eys[i], att_c)) # utt x (zdim + hdim)
z_list, c_list = self.rnn_forward(ey, z_list, c_list, z_list, c_list)
att_ws.append(att_w) # for debugging
att_ws = F.stack(att_ws, axis=1)
att_ws.to_cpu()
return att_ws.data
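To isolate the role F.hstack plays inside this decoder loop, here is a standalone sketch of the per-step concatenation with made-up sizes (batch of 3 utterances, embedding dimension zdim=4, attention context dimension hdim=6); the variable names are illustrative only.

import numpy as np
import chainer.functions as F

ey_t = np.zeros((3, 4), dtype=np.float32)   # eys[i]: utt x zdim
att_c = np.zeros((3, 6), dtype=np.float32)  # attention context: utt x hdim
ey = F.hstack((ey_t, att_c))                # utt x (zdim + hdim) -> (3, 10)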