

Python core.RepeatVector Method Code Examples

This article collects typical usage examples of the Python method keras.layers.core.RepeatVector. If you are unsure what core.RepeatVector does, how to use it, or are looking for working examples, the curated code samples below should help. You can also explore further usage examples from keras.layers.core, the module this method belongs to.


A total of 12 code examples of the core.RepeatVector method are shown below, sorted by popularity by default.
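Before the project examples, here is a minimal sketch of what RepeatVector does: it repeats a 2D tensor of shape (batch, features) n times along a new time axis, producing (batch, n, features). The layer sizes below are arbitrary and assume a Keras 1.x/2.x environment where keras.layers.core is importable.

import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, RepeatVector

# A Dense layer produces a 2D output (batch, 32); RepeatVector(3)
# repeats it along a new time axis, giving a 3D output (batch, 3, 32).
model = Sequential()
model.add(Dense(32, input_dim=16))
model.add(RepeatVector(3))
model.compile(optimizer='sgd', loss='mse')  # compiled only so predict() can run

x = np.random.random((4, 16)).astype('float32')
print(model.predict(x).shape)  # expected: (4, 3, 32)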

Example 1: create

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def create(self):
        self.textual_embedding(self, mask_zero=True)
        self.stacked_RNN(self)
        self.add(self._config.recurrent_encoder(
            self._config.hidden_state_dim, 
            return_sequences=False,
            go_backwards=self._config.go_backwards))
        self.add(Dropout(0.5))
        self.add(RepeatVector(self._config.max_output_time_steps))
        self.add(self._config.recurrent_decoder(
                self._config.hidden_state_dim, return_sequences=True))
        self.add(Dropout(0.5))
        self.add(TimeDistributedDense(self._config.output_dim))
        self.add(Activation('softmax'))


###
# Multimodal models
### 
Author: mateuszmalinowski | Project: visual_turing_test-tutorial | Lines: 21 | Source: model_zoo.py

Example 2: _buildDecoder

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def _buildDecoder(self, z, latent_rep_size, max_length, charset_length):
    h = Dense(latent_rep_size, name='latent_input', activation='relu')(z)
    h = RepeatVector(max_length, name='repeat_vector')(h)
    h = GRU(501, return_sequences=True, name='gru_1')(h)
    h = GRU(501, return_sequences=True, name='gru_2')(h)
    h = GRU(501, return_sequences=True, name='gru_3')(h)
    return TimeDistributed(
        Dense(charset_length, activation='softmax'), name='decoded_mean')(h) 
Author: deepchem | Project: deepchem | Lines: 10 | Source: model.py

Example 3: test_repeat_vector

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def test_repeat_vector(self):
        layer = core.RepeatVector(10)
        self._runner(layer) 
Author: lllcho | Project: CAPTCHA-breaking | Lines: 5 | Source: test_core.py

Example 4: build_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def build_model(input_size, seq_len, hidden_size):
    """建立一个 sequence to sequence 模型"""
    model = Sequential()
    model.add(GRU(input_dim=input_size, output_dim=hidden_size, return_sequences=False))
    model.add(Dense(hidden_size, activation="relu"))
    model.add(RepeatVector(seq_len))
    model.add(GRU(hidden_size, return_sequences=True))
    model.add(TimeDistributed(Dense(output_dim=input_size, activation="linear")))
    model.compile(loss="mse", optimizer='adam')

    return model 
Author: Linusp | Project: soph | Lines: 13 | Source: pig_latin.py
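As a hedged usage sketch (not taken from the pig_latin.py project), the builder above could be smoke-tested on random data as follows; the sizes, batch settings, and the Keras 1.x argument name nb_epoch are assumptions.

import numpy as np

# Hypothetical smoke test for build_model above; all sizes are assumptions.
input_size, seq_len, hidden_size = 8, 5, 16
model = build_model(input_size, seq_len, hidden_size)

# The encoder GRU collapses (batch, timesteps, input_size) into a single vector,
# and RepeatVector expands it back to seq_len steps for the decoder GRU.
x = np.random.random((32, seq_len, input_size))
y = np.random.random((32, seq_len, input_size))
model.fit(x, y, nb_epoch=1, batch_size=8)  # nb_epoch is the Keras 1.x spelling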

Example 5: build_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def build_model(input_size, seq_len, hidden_size):
    """建立一个 seq2seq 模型"""
    model = Sequential()
    model.add(GRU(input_dim=input_size, output_dim=hidden_size, return_sequences=False))
    model.add(Dense(hidden_size, activation="relu"))
    model.add(RepeatVector(seq_len))
    model.add(GRU(hidden_size, return_sequences=True))
    model.add(TimeDistributed(Dense(output_dim=input_size, activation="softmax")))
    model.compile(loss="categorical_crossentropy", optimizer='adam')

    return model 
Author: Linusp | Project: soph | Lines: 13 | Source: adder.py

Example 6: base_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def base_model(feature_len=1, after_day=1, input_shape=(20, 1)):
    model = Sequential()

    model.add(Conv1D(10, kernel_size=5, input_shape=input_shape,  activation='relu', padding='valid', strides=1))
    model.add(LSTM(100, return_sequences=False, input_shape=input_shape))
    model.add(Dropout(0.25))

    # one to many
    model.add(RepeatVector(after_day))
    model.add(LSTM(200, return_sequences=True))
    model.add(Dropout(0.25))
    model.add(TimeDistributed(Dense(100, activation='relu', kernel_initializer='uniform')))
    model.add(TimeDistributed(Dense(feature_len, activation='linear', kernel_initializer='uniform')))

    return model 
Author: kaka-lin | Project: stock-price-predict | Lines: 17 | Source: lstm_cnn.py

Example 7: base_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def base_model(feature_len=1, after_day=1, input_shape=(20, 1)):
    model = Sequential()

    model.add(LSTM(units=100, return_sequences=False, input_shape=input_shape))
    #model.add(LSTM(units=100, return_sequences=False, input_shape=input_shape))

    # one to many
    model.add(RepeatVector(after_day))
    model.add(LSTM(200, return_sequences=True))
    #model.add(LSTM(50, return_sequences=True))

    model.add(TimeDistributed(Dense(units=feature_len, activation='linear')))

    return model 
Author: kaka-lin | Project: stock-price-predict | Lines: 16 | Source: lstm_mtm.py
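A hedged check of the one-to-many builder above: the compile settings and dummy data below are assumptions, not taken from the lstm_mtm.py project.

import numpy as np

# Assumed usage: predict 5 future steps from a 20-step window of one feature.
model = base_model(feature_len=1, after_day=5, input_shape=(20, 1))
model.compile(loss='mse', optimizer='adam')  # compile settings are an assumption

x = np.random.random((16, 20, 1))
print(model.predict(x).shape)  # expected: (16, 5, 1)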

Example 8: build_CNN_LSTM

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def build_CNN_LSTM(channels, width, height, lstm_output_size, nb_classes):
	model = Sequential()
	# 1 conv
	model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu', 
		input_shape=(channels, height, width)))
	model.add(BatchNormalization(mode=0, axis=1))
	# 2 conv
	model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))
	model.add(BatchNormalization(mode=0, axis=1))
	model.add(MaxPooling2D(pool_size=(2, 2), strides=(2,2)))
	# 3 conv
	model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))
	model.add(BatchNormalization(mode=0, axis=1))
	# 4 conv
	model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))
	model.add(BatchNormalization(mode=0, axis=1))
	model.add(MaxPooling2D(pool_size=(2, 2), strides=(2,2)))
	# flatten
	model.add(Flatten())
	# 1 dense
	model.add(Dense(512, activation='relu'))
	model.add(BatchNormalization())
	model.add(Dropout(0.5))
	# 2 dense
	model.add(Dense(512, activation='relu'))
	model.add(BatchNormalization())
	model.add(Dropout(0.5))
	# lstm
	model.add(RepeatVector(lstm_output_size))
	model.add(LSTM(512, return_sequences=True))
	model.add(TimeDistributed(Dropout(0.5)))
	model.add(TimeDistributed(Dense(nb_classes, activation='softmax')))
	model.summary()
	model.compile(loss='categorical_crossentropy',
				  optimizer='adam',
				  metrics=[categorical_accuracy_per_sequence],
				  sample_weight_mode='temporal'
				  )

	return model 
Author: xingjian-f | Project: DeepLearning-OCR | Lines: 42 | Source: CNN_LSTM.py

Example 9: _buildDecoder

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def _buildDecoder(self, z, latent_rep_size, max_length, charset_length):
        h = Dense(latent_rep_size, name='latent_input', activation='relu')(z)
        h = RepeatVector(max_length, name='repeat_vector')(h)
        h = GRU(501, return_sequences=True, name='gru_1')(h)
        h = GRU(501, return_sequences=True, name='gru_2')(h)
        h = GRU(501, return_sequences=True, name='gru_3')(h)
        return TimeDistributed(Dense(charset_length, activation='softmax'), name='decoded_mean')(h) 
Author: maxhodak | Project: keras-molecules | Lines: 9 | Source: model.py

Example 10: construct_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """
        Склеены три слова
    """
    input = Input(shape=(maxlen, input_dimension), name='input')


    # lstm_encode = LSTM(lstm_vector_output_dim)(input)
    lstm_encode = SimpleRNN(lstm_vector_output_dim, activation='sigmoid')(input)


    encoded_copied = RepeatVector(n=maxlen)(lstm_encode)


    # lstm_decode = LSTM(output_dim=output_dimension, return_sequences=True, activation='softmax')(encoded_copied)
    lstm_decode = SimpleRNN(output_dim=output_dimension, return_sequences=True, activation='softmax')(encoded_copied)


    decoded = TimeDistributed(Dense(output_dimension, activation='softmax'))(lstm_decode)


    encoder_decoder = Model(input, decoded)


    adam = Adam()
    encoder_decoder.compile(loss='categorical_crossentropy', optimizer=adam)


    return encoder_decoder 
Author: kootenpv | Project: neural_complete | Lines: 31 | Source: model.py

Example 11: construct_model

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """
    Склеены три слова
    """
    input = Input(shape=(maxlen, input_dimension), name='input')


    # lstm_encode = LSTM(lstm_vector_output_dim)(input)
    lstm_encode = SimpleRNN(lstm_vector_output_dim, activation='relu')(input)


    encoded_copied = RepeatVector(n=maxlen)(lstm_encode)


    # lstm_decode = LSTM(output_dim=output_dimension, return_sequences=True, activation='softmax')(encoded_copied)
    lstm_decode = SimpleRNN(output_dim=output_dimension, return_sequences=True, activation='softmax')(encoded_copied)


    encoder = Model(input, lstm_decode)


    adam = Adam()
    encoder.compile(loss='categorical_crossentropy', optimizer=adam)


    return encoder 
Author: kootenpv | Project: neural_complete | Lines: 28 | Source: model_all_stacked.py

Example 12: build

# Required import: from keras.layers import core [as alias]
# Alternatively: from keras.layers.core import RepeatVector [as alias]
def build(self):
        enc_size = self.size_of_env_observation()
        argument_size = IntegerArguments.size_of_arguments
        input_enc = InputLayer(batch_input_shape=(self.batch_size, enc_size), name='input_enc')
        input_arg = InputLayer(batch_input_shape=(self.batch_size, argument_size), name='input_arg')
        input_prg = Embedding(input_dim=PROGRAM_VEC_SIZE, output_dim=PROGRAM_KEY_VEC_SIZE, input_length=1,
                              batch_input_shape=(self.batch_size, 1))

        f_enc = Sequential(name='f_enc')
        f_enc.add(Merge([input_enc, input_arg], mode='concat'))
        f_enc.add(MaxoutDense(128, nb_feature=4))
        self.f_enc = f_enc

        program_embedding = Sequential(name='program_embedding')
        program_embedding.add(input_prg)

        f_enc_convert = Sequential(name='f_enc_convert')
        f_enc_convert.add(f_enc)
        f_enc_convert.add(RepeatVector(1))

        f_lstm = Sequential(name='f_lstm')
        f_lstm.add(Merge([f_enc_convert, program_embedding], mode='concat'))
        f_lstm.add(LSTM(256, return_sequences=False, stateful=True, W_regularizer=l2(0.0000001)))
        f_lstm.add(Activation('relu', name='relu_lstm_1'))
        f_lstm.add(RepeatVector(1))
        f_lstm.add(LSTM(256, return_sequences=False, stateful=True, W_regularizer=l2(0.0000001)))
        f_lstm.add(Activation('relu', name='relu_lstm_2'))
        # plot(f_lstm, to_file='f_lstm.png', show_shapes=True)

        f_end = Sequential(name='f_end')
        f_end.add(f_lstm)
        f_end.add(Dense(1, W_regularizer=l2(0.001)))
        f_end.add(Activation('sigmoid', name='sigmoid_end'))

        f_prog = Sequential(name='f_prog')
        f_prog.add(f_lstm)
        f_prog.add(Dense(PROGRAM_KEY_VEC_SIZE, activation="relu"))
        f_prog.add(Dense(PROGRAM_VEC_SIZE, W_regularizer=l2(0.0001)))
        f_prog.add(Activation('softmax', name='softmax_prog'))
        # plot(f_prog, to_file='f_prog.png', show_shapes=True)

        f_args = []
        for ai in range(1, IntegerArguments.max_arg_num+1):
            f_arg = Sequential(name='f_arg%s' % ai)
            f_arg.add(f_lstm)
            f_arg.add(Dense(IntegerArguments.depth, W_regularizer=l2(0.0001)))
            f_arg.add(Activation('softmax', name='softmax_arg%s' % ai))
            f_args.append(f_arg)
        # plot(f_arg, to_file='f_arg.png', show_shapes=True)

        self.model = Model([input_enc.input, input_arg.input, input_prg.input],
                           [f_end.output, f_prog.output] + [fa.output for fa in f_args],
                           name="npi")
        self.compile_model()
        plot(self.model, to_file='model.png', show_shapes=True) 
Author: mokemokechicken | Project: keras_npi | Lines: 57 | Source: model.py


Note: the keras.layers.core.RepeatVector method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please consult each project's license before redistributing or using the code, and do not reproduce without permission.