

Python activations.get Method Code Examples

This article collects typical usage examples of the Python method keras.layers.activations.get. If you are unsure what activations.get does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the containing module, keras.layers.activations.


A total of 13 code examples of the activations.get method are shown below, sorted by popularity by default.
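All of the examples that follow rely on the same lookup pattern: activations.get accepts an activation name (a string), a callable, or None, and returns a callable activation function that the layer later applies to its output. Below is a minimal sketch of that lookup, assuming a classic Keras (1.x/2.x) environment in which the import shown in the examples is available; the names in the sketch are illustrative only.

# Minimal sketch of the activations.get lookup (classic Keras 1.x/2.x assumed;
# the variable names below are illustrative only).
from keras.layers import activations

tanh_fn = activations.get('tanh')      # a string name resolves to the matching activation function
linear_fn = activations.get(None)      # None resolves to the identity ('linear') activation
same_fn = activations.get(tanh_fn)     # an already-callable activation is returned unchanged

# Inside a custom layer, the resolved callable is typically stored in __init__
# and applied to the layer output later, e.g.:
#     self.activation = activations.get(activation)
#     output = self.activation(K.dot(x, self.W) + self.b)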

Example 1: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self,output_dim,mem_vec_dim,init='glorot_uniform', activation='linear', weights=None,
                 activity_regularizer=None,input_dim=None, **kwargs):
        '''
        Params:
            output_dim: output dimension
            mem_vec_dim: dimension of the query (memory) vector
            
        '''
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.output_dim = output_dim
        self.input_dim = input_dim
        self.mem_vector_dim=mem_vec_dim
        
        self.activity_regularizer = regularizers.get(activity_regularizer)


        self.initial_weights = weights

        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)
        super(MemoryNet,self).__init__(**kwargs) 
Developer: LibCorner, Project: Keras_note, Lines: 24, Source: memory_layers.py

Example 2: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 activation='tanh', inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W = dropout_W
        self.dropout_U = dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(MGU, self).__init__(**kwargs) 
Developer: senochow, Project: text_classification, Lines: 21, Source: my_recurrent.py

Example 3: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, output_dim, attention_vec, attn_activation='tanh', single_attention_param=False, **kwargs):
        self.attention_vec = attention_vec
        self.attn_activation = activations.get(attn_activation)
        self.single_attention_param = single_attention_param

        super(AttentionLSTM, self).__init__(output_dim, **kwargs) 
Developer: wentaozhu, Project: recurrent-attention-for-QA-SQUAD-based-on-keras, Lines: 8, Source: layers.py

Example 4: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, nb_filter, filter_length,
                 init='glorot_uniform', activation=None, weights=None,
                 border_mode='valid', subsample_length=1,
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, input_dim=None, input_length=None, **kwargs):
        if border_mode != 'valid':
            raise Exception('Invalid border mode for LocallyConnected1D '
                            '(only "valid" is supported):', border_mode)
        self.nb_filter = nb_filter
        self.filter_length = filter_length
        self.init = initializations.get(init, dim_ordering='th')
        self.activation = activations.get(activation)

        self.border_mode = border_mode
        self.subsample_length = subsample_length

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=3)]
        self.initial_weights = weights
        self.input_dim = input_dim
        self.input_length = input_length
        if self.input_dim:
            kwargs['input_shape'] = (self.input_length, self.input_dim)
        super(LocallyConnected1D, self).__init__(**kwargs) 
Developer: GeekLiB, Project: keras, Lines: 34, Source: local.py

Example 5: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, layer, attention_vec, attn_activation='tanh', single_attention_param=False, **kwargs):
        assert isinstance(layer, LSTM) or isinstance(layer, GRU)
        super(AttentionWrapper, self).__init__(layer, **kwargs)
        self.supports_masking = True
        self.attention_vec = attention_vec
        self.attn_activation = activations.get(attn_activation)
        self.single_attention_param = single_attention_param 
Developer: saurabhmathur96, Project: Neural-Chatbot, Lines: 9, Source: sequence_blocks.py

Example 6: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, nb_filter, filter_length,
                 init='glorot_uniform', activation=None, weights=None,
                 border_mode='valid', subsample_length=1,
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, input_dim=None, input_length=None, **kwargs):
        if border_mode != 'valid':
            raise ValueError('Invalid border mode for LocallyConnected1D '
                             '(only "valid" is supported):', border_mode)
        self.nb_filter = nb_filter
        self.filter_length = filter_length
        self.init = initializations.get(init, dim_ordering='th')
        self.activation = activations.get(activation)

        self.border_mode = border_mode
        self.subsample_length = subsample_length

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=3)]
        self.initial_weights = weights
        self.input_dim = input_dim
        self.input_length = input_length
        if self.input_dim:
            kwargs['input_shape'] = (self.input_length, self.input_dim)
        super(LocallyConnected1D, self).__init__(**kwargs) 
Developer: ambrite, Project: keras-customized, Lines: 34, Source: local.py

Example 7: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self,output_dim,attention_vec,attn_activation='tanh',
                 attn_inner_activation='tanh', single_attn=False,
                 n_attention_dim=None,**kwargs):
        '''
            attention_vec: the vector used to compute the attention weights
            (if attention_vec=None, attention is not applied)
        '''
        self.attention_vec=attention_vec
        self.attn_activation = activations.get(attn_activation)
        self.attn_inner_activation = activations.get(attn_inner_activation)
        self.single_attention_param = single_attn
        self.n_attention_dim = output_dim if n_attention_dim is None else n_attention_dim
        super(AttentionLSTM,self).__init__(output_dim,**kwargs) 
Developer: LibCorner, Project: Keras_note, Lines: 15, Source: AttentionLSTM.py

Example 8: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self,output_dim,att_dim,attn_activation='tanh',
                 attn_inner_activation='tanh',
                 single_attn=False,**kwargs):
        '''
            attention_vec: the vector used to compute the attention weights
            single_attention_param: if True, all dimensions at timestep t share a single attention weight
        '''
        self.attn_activation=activations.get(attn_activation)
        self.attn_inner_activation=activations.get(attn_inner_activation)
        self.single_attention_param=single_attn
        self.input_spec=None
        self.att_dim=att_dim
        super(AttentionLSTM,self).__init__(output_dim,**kwargs) 
Developer: LibCorner, Project: Keras_note, Lines: 15, Source: attention_layers.py

Example 9: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, output_dim,
               init='glorot_uniform', inner_init='orthogonal',
               activation='tanh', inner_activation='hard_sigmoid', **kwargs):
    self.output_dim       = output_dim
    self.init             = initializations.get(init)
    self.inner_init       = initializations.get(inner_init)
    self.activation       = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    super(DecoderGRU, self).__init__(**kwargs) 
Developer: milankinen, Project: c2w2c, Lines: 11, Source: decoder.py

Example 10: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, output_dim, attention_vec, attn_activation='tanh',
                 attn_inner_activation='tanh', single_attn=False,
                 n_attention_dim=None, **kwargs):
        self.attention_vec = attention_vec
        self.attn_activation = activations.get(attn_activation)
        self.attn_inner_activation = activations.get(attn_inner_activation)
        self.single_attention_param = single_attn
        self.n_attention_dim = output_dim if n_attention_dim is None else n_attention_dim

        super(AttentionLSTM, self).__init__(output_dim, **kwargs) 
Developer: eshijia, Project: knowledge-graph-keras, Lines: 12, Source: attention_lstm.py

Example 11: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, nb_filter, shared_pool, nb_row=1, nb_col=1,
                 init='glorot_uniform', activation='linear', weights=None,
                 border_mode='valid', subsample=(1, 1),
                 dim_ordering=K.image_dim_ordering(),
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        if border_mode != 'valid':
            raise Exception('Invalid border mode for Convolution2D '
                            '(only "valid" is supported):', border_mode)
        if tuple(subsample) != (nb_row,nb_col): #model.to_json saves subsample as list and not as tuple
            raise Exception('Local layer only works with equal filter dimensions and strides')
        self.nb_filter = nb_filter
        self.shared_pool = shared_pool
        self.nb_row = nb_row
        self.nb_col = nb_col
        self.init = initializations.get(init, dim_ordering=dim_ordering)
        self.activation = activations.get(activation)

        self.border_mode = border_mode
        self.subsample = tuple(subsample)
        assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
        self.dim_ordering = dim_ordering

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=4)]
        self.initial_weights = weights
        super(SemiShared, self).__init__(**kwargs) 
Developer: Simoncarbo, Project: Ultras-Sound-Nerve-Segmentation---Kaggle, Lines: 37, Source: layers.py

Example 12: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, nb_filter, filter_length,
                 init='uniform', activation='linear', weights=None,
                 border_mode='valid', subsample_length=1,
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, input_dim=None, input_length=None, **kwargs):
        if border_mode != 'valid':
            raise Exception('Invalid border mode for LocallyConnected1D '
                            '(only "valid" is supported):', border_mode)
        self.nb_filter = nb_filter
        self.filter_length = filter_length
        self.init = initializations.get(init, dim_ordering='th')
        self.activation = activations.get(activation)

        self.border_mode = border_mode
        self.subsample_length = subsample_length

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=3)]
        self.initial_weights = weights
        self.input_dim = input_dim
        self.input_length = input_length
        if self.input_dim:
            kwargs['input_shape'] = (self.input_length, self.input_dim)
        super(LocallyConnected1D, self).__init__(**kwargs) 
Developer: Chemoinformatics, Project: InnerOuterRNN, Lines: 34, Source: local.py

Example 13: __init__

# Required import: from keras.layers import activations [as alias]
# Or: from keras.layers.activations import get [as alias]
def __init__(self, output_dim, init='glorot_uniform', attn_activation='tanh', **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.attn_activation = activations.get(attn_activation)
        super(AttentionLayer, self).__init__(**kwargs) 
Developer: senochow, Project: text_classification, Lines: 7, Source: attention_layer.py


Note: The keras.layers.activations.get method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as Github and MSDocs. The code snippets are selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please refer to the corresponding project's License before distributing or using the code; do not reproduce without permission.