

Python functional.selu Method Code Examples

This article collects typical usage examples of the Python method torch.nn.functional.selu. If you are unsure what functional.selu does, how to call it, or what it looks like in real code, the selected examples below may help. You can also explore further usage examples from its containing module, torch.nn.functional.


The following presents 15 code examples of the functional.selu method, sorted by popularity by default.
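
Before the project examples, here is a minimal sketch (not taken from any of the projects below) of calling F.selu directly on a tensor:

import torch
import torch.nn.functional as F

x = torch.randn(4, 8)   # arbitrary input tensor
y = F.selu(x)           # scale * (max(0, x) + min(0, alpha * (exp(x) - 1)))
print(y.shape)          # torch.Size([4, 8]); the output shape matches the input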

Example 1: test_selu_grad

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def test_selu_grad(N=50):
    from numpy_ml.neural_nets.activations import SELU

    N = np.inf if N is None else N

    mine = SELU()
    gold = torch_gradient_generator(F.selu)

    i = 0
    while i < N:
        n_ex = np.random.randint(1, 100)
        n_dims = np.random.randint(1, 100)
        z = random_tensor((n_ex, n_dims))
        assert_almost_equal(mine.grad(z), gold(z), decimal=6)
        print("PASSED")
        i += 1 
Author: ddbourgin, Project: numpy-ml, Lines of code: 18, Source: test_nn_activations.py

Example 2: forward

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def forward(self, screen, variables):
        # cnn
        screen_features = F.selu(self.conv1(screen))
        screen_features = F.selu(self.conv2(screen_features))
        screen_features = F.selu(self.conv3(screen_features))
        screen_features = F.selu(self.conv4(screen_features))
        screen_features = F.selu(self.conv5(screen_features))
        screen_features = F.selu(self.conv6(screen_features))
        screen_features = screen_features.view(screen_features.size(0), -1)

        # features
        input = self.screen_features1(screen_features)
        input = self.batch_norm(input)
        input = F.selu(input)

        # action
        action = F.selu(self.action1(input))
        #action = torch.cat([action, variables], 1)
        action = self.action2(action)

        return action, input 
Author: akolishchak, Project: doom-net-pytorch, Lines of code: 23, Source: aac.py

Example 3: forward

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def forward(self, screen, variables):
        # cnn
        screen_features = F.max_pool2d(screen, kernel_size=(20, 20), stride=(20, 20))
        screen_features = F.selu(self.conv1(screen_features))
        screen_features = F.selu(self.conv2(screen_features))
        screen_features = F.selu(self.conv3(screen_features))
        screen_features = screen_features.view(screen_features.size(0), -1)

        # features
        input = self.screen_features1(screen_features)
        input = self.batch_norm(input)
        input = F.selu(input)

        # action
        action = F.selu(self.action1(input))
        action = torch.cat([action, variables], 1)
        action = self.batch_norm_action(action)
        action = self.action2(action)

        return action, input 
Author: akolishchak, Project: doom-net-pytorch, Lines of code: 22, Source: aac_depth.py

Example 4: activation

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def activation(input, kind):
  #print("Activation: {}".format(kind))
  if kind == 'selu':
    return F.selu(input)
  elif kind == 'relu':
    return F.relu(input)
  elif kind == 'relu6':
    return F.relu6(input)
  elif kind == 'sigmoid':
    return F.sigmoid(input)
  elif kind == 'tanh':
    return F.tanh(input)
  elif kind == 'elu':
    return F.elu(input)
  elif kind == 'lrelu':
    return F.leaky_relu(input)
  elif kind == 'swish':
    return input*F.sigmoid(input)
  elif kind == 'none':
    return input
  else:
    raise ValueError('Unknown non-linearity type') 
Author: NVIDIA, Project: DeepRecommender, Lines of code: 24, Source: model.py
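
A short usage sketch for the activation dispatcher above; the tensor shape and the call site are illustrative assumptions, not part of the original project:

import torch

hidden = torch.randn(16, 128)            # e.g. the output of a linear layer
out = activation(hidden, kind='selu')    # dispatches to F.selu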

Example 5: readout

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def readout(h, h2):
  catted_reads = map(lambda x: torch.cat([h[x[0]], h2[x[1]]], 1), zip(h2.keys(), h.keys()))
  activated_reads = map(lambda x: F.selu( R(x) ), catted_reads)
  readout = Variable(torch.zeros(1, 128))
  for read in activated_reads:
    readout = readout + read
  return F.tanh( readout ) 
Author: deepchem, Project: deepchem, Lines of code: 9, Source: mpnn.py

Example 6: message_pass

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def message_pass(g, h, k):
  for v in g.keys():
    neighbors = g[v]
    for neighbor in neighbors:
      e_vw = neighbor[0] # feature variable
      w = neighbor[1]
      
      m_w = V[k](h[w])
      m_e_vw = E(e_vw)
      reshaped = torch.cat( (h[v], m_w, m_e_vw), 1)
      h[v] = F.selu(U[k](reshaped)) 
Author: deepchem, Project: deepchem, Lines of code: 13, Source: mpnn.py

Example 7: test_selu_activation

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def test_selu_activation(N=50):
    from numpy_ml.neural_nets.activations import SELU

    N = np.inf if N is None else N

    mine = SELU()
    gold = lambda z: F.selu(torch.FloatTensor(z)).numpy()

    i = 0
    while i < N:
        n_dims = np.random.randint(1, 100)
        z = random_stochastic_matrix(1, n_dims)
        assert_almost_equal(mine.fn(z), gold(z))
        print("PASSED")
        i += 1 
Author: ddbourgin, Project: numpy-ml, Lines of code: 17, Source: test_nn_activations.py

Example 8: forward

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def forward(self, loc, tim):
        h1 = Variable(torch.zeros(1, 1, self.hidden_size))
        c1 = Variable(torch.zeros(1, 1, self.hidden_size))
        if self.use_cuda:
            h1 = h1.cuda()
            c1 = c1.cuda()

        loc_emb = self.emb_loc(loc)
        tim_emb = self.emb_tim(tim)
        x = torch.cat((loc_emb, tim_emb), 2)
        x = self.dropout(x)

        if self.rnn_type == 'GRU' or self.rnn_type == 'RNN':
            out, h1 = self.rnn(x, h1)
        elif self.rnn_type == 'LSTM':
            out, (h1, c1) = self.rnn(x, (h1, c1))
        out = out.squeeze(1)
        out = F.selu(out)
        out = self.dropout(out)

        y = self.fc(out)
        score = F.log_softmax(y)  # calculate loss by NLLoss
        return score


# ############# rnn model with attention ####################### # 
Author: vonfeng, Project: DeepMove, Lines of code: 28, Source: model.py

Example 9: __init__

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def __init__(self):
        super(LayerTest, self).__init__()
        self.selu = nn.SELU() 
Author: nerox8664, Project: pytorch2keras, Lines of code: 5, Source: selu.py

Example 10: forward

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def forward(self, x):
        x = self.selu(x)
        return x 
Author: nerox8664, Project: pytorch2keras, Lines of code: 5, Source: selu.py
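
Examples 9 and 10 are two fragments of the same small test module; a minimal sketch combining them (the input shape is an illustrative assumption) could look like this:

import torch
import torch.nn as nn

class LayerTest(nn.Module):
    def __init__(self):
        super(LayerTest, self).__init__()
        self.selu = nn.SELU()

    def forward(self, x):
        x = self.selu(x)
        return x

model = LayerTest()
output = model(torch.randn(1, 3, 224, 224))  # input shape chosen for illustration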

Example 11: forward

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def forward(self, x1, x2, y):
        # x1 = self.sm(self.fc(self.sma(x1)))
        # x2 = self.sm(self.fc(self.sma(x2)))
        # y = self.sm(self.fc(self.sma(y)))

        # x1 = self.sm(self.fc(x1))
        # x2 = self.sm(self.fc(x2))
        # y = self.sm(self.fc(y))

        x1 = F.selu(self.fc(self.sma(x1)))
        x2 = F.selu(self.fc(self.sma(x2)))
        y = F.selu(self.fc(self.sma(y)))
        return x1, x2, y 
Author: xwzy, Project: Triplet-deep-hash-pytorch, Lines of code: 15, Source: hashNet.py

Example 12: selu

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def selu(self):
        return self.applyMonotone(F.selu) 
Author: eth-sri, Project: diffai, Lines of code: 4, Source: ai.py

Example 13: lambda_prediction

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def lambda_prediction(self, r, level):
        """
        Predict lambda weight for Levenberg-Marquardt update
        :param r: residual error with dim: (N, C, M)
        :param level: pyramid level used in this iteration, int
        :return: lambda weight, dim: (N, 6)
        """
        avg_r = torch.mean(torch.abs(r), dim=2)                 # (N, C)
        lambda_fc = getattr(self, 'lambda_fc_' + str(level))
        lambda_w = F.selu(lambda_fc(avg_r)) + 2.0               # (N, 6)
        return lambda_w 
Author: sfu-gruvi-3dv, Project: sanet_relocal_demo, Lines of code: 13, Source: ba_tracknet_mirror_b.py

Example 14: test_selu

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def test_selu(self):
        inp = torch.randn(1, 3, 32, 32, device='cuda', dtype=self.dtype)
        output = F.selu(inp) 
Author: NVIDIA, Project: apex, Lines of code: 5, Source: test_pyprof_nvtx.py

Example 15: __init__

# Required module import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import selu [as alias]
def __init__(self):
        super(LayerSELUTest, self).__init__()
        self.selu = nn.SELU() 
Author: nerox8664, Project: onnx2keras, Lines of code: 5, Source: selu.py


Note: The torch.nn.functional.selu examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please follow the license of the corresponding project when distributing or reusing the code; do not republish without permission.