本文整理汇总了Python中keras_frcnn.RoiPoolingConv.RoiPoolingConv方法的典型用法代码示例。如果您正苦于以下问题:Python RoiPoolingConv.RoiPoolingConv方法的具体用法?Python RoiPoolingConv.RoiPoolingConv怎么用?Python RoiPoolingConv.RoiPoolingConv使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类keras_frcnn.RoiPoolingConv
的用法示例。
在下文中一共展示了RoiPoolingConv.RoiPoolingConv方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Fast R-CNN classifier head over a 1024-channel backbone feature map.

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; classifier_layers is
            currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        input_shape = (num_rois, 14, 14, 1024)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 1024, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例2: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """VGG-style Fast R-CNN classifier head (two fc4096 + dropout stages).

    Args:
        base_layers: 512-channel feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; unused in this body.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 7
        input_shape = (num_rois, 7, 7, 512)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 512, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)
    out = TimeDistributed(Dropout(0.5))(out)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)
    out = TimeDistributed(Dropout(0.5))(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例3: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Fast R-CNN classifier head for a backbone emitting 1088 channels.

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; classifier_layers is
            currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        # Changed the input shape to 1088 from 1024 because of nn_base's
        # output being 1088. NOTE(review): original author was unsure this
        # is correct — confirm against the backbone's channel count.
        input_shape = (num_rois, 14, 14, 1088)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 1024, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例4: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """VGG-style Fast R-CNN classifier head (two fc4096 stages, no dropout).

    Args:
        base_layers: 512-channel feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; unused in this body.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 7
        input_shape = (num_rois, 7, 7, 512)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 512, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例5: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Fast R-CNN classifier head over a 1024-channel backbone feature map.

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; classifier_layers is
            currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        input_shape = (num_rois, 14, 14, 1024)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 1024, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例6: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes, trainable=True):
    """
    The final classifier to match the original VGG-16 implementation.
    The only difference being the Roipooling layer uses tensorflow's
    bilinear interpolation.
    """
    pooling_regions = 7
    out = RoiPoolingConv(pooling_regions, num_rois, trainable=trainable)([base_layers, input_rois])
    out = TimeDistributed(Flatten(), name="flatten", trainable=trainable)(out)
    # Two fc4096 + dropout stages; dropout matches the original implementation.
    for stage in (1, 2):
        out = TimeDistributed(Dense(4096, activation='relu', trainable=trainable),
                              name="fc{}".format(stage), trainable=trainable)(out)
        out = TimeDistributed(Dropout(0.5),
                              name="drop_out{}".format(stage), trainable=trainable)(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero', trainable=trainable),
                                name='dense_class_{}'.format(nb_classes), trainable=trainable)(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero', trainable=trainable),
                               name='dense_regress_{}'.format(nb_classes), trainable=trainable)(out)
    return [out_class, out_regr]
示例7: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=True):
    """Tensorflow-only Fast R-CNN classifier head (theano rejected explicitly).

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: forwarded to RoiPoolingConv and the output Dense layers;
            classifier_layers is currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is not tensorflow.
    """
    # compile times on theano tend to be very high, so only the tensorflow
    # backend is supported here
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        input_shape = (num_rois, 14, 14, 1024)
    elif K.backend() == 'theano':
        raise ValueError("Theano backend not supported")
    else:
        # Previously any other backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois, trainable=trainable)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero', trainable=trainable),
                                name='dense_class_{}'.format(nb_classes), trainable=trainable)(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero', trainable=trainable),
                               name='dense_regress_{}'.format(nb_classes), trainable=trainable)(out)
    return [out_class, out_regr]
示例8: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Fast R-CNN classifier head over a 1024-channel backbone feature map.

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; classifier_layers is
            currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        input_shape = (num_rois, 14, 14, 1024)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 1024, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例9: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """VGG-style Fast R-CNN classifier head (two fc4096 stages, no dropout).

    Args:
        base_layers: 512-channel feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; unused in this body.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    # compile times on theano tend to be very high, so we use smaller ROI
    # pooling regions there as a workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 7
        input_shape = (num_rois, 7, 7, 512)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 512, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例10: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Fast R-CNN classifier head over a 1024-channel backbone feature map.

    Args:
        base_layers: feature-map tensor from the shared backbone.
        input_rois: tensor of regions of interest.
        num_rois: number of ROIs processed per forward pass.
        nb_classes: number of output classes (background included).
        trainable: kept for interface compatibility; classifier_layers is
            currently built with trainable=True regardless.

    Returns:
        [out_class, out_regr]: per-ROI class softmax and linear bbox
        regression outputs (no regression targets for the bg class).

    Raises:
        ValueError: if the Keras backend is neither tensorflow nor theano.
    """
    if K.backend() == 'tensorflow':
        pooling_regions = 14
        input_shape = (num_rois, 14, 14, 1024)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 1024, 7, 7)
    else:
        # Previously an unknown backend fell through and raised a confusing
        # NameError on pooling_regions below.
        raise ValueError('Unsupported Keras backend: {}'.format(K.backend()))

    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True)
    out = TimeDistributed(Flatten())(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
示例11: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes=21):
    """Minimal classifier head: ROI pooling, shared classifier layers, then
    per-ROI class softmax and a single 4-value linear regression output."""
    pool_size = 7
    pooled = RoiPoolingConv(pool_size, num_rois)([base_layers, input_rois])
    features = classifier_layers(pooled)
    flat = TimeDistributed(Flatten(), name='td_flatten')(features)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax'),
                                name='dense_class_{}'.format(nb_classes))(flat)
    out_regr = TimeDistributed(Dense(4, activation='linear'), name='dense_regr')(flat)
    return [out_class, out_regr]
示例12: classifier
# 需要导入模块: from keras_frcnn import RoiPoolingConv [as 别名]
# 或者: from keras_frcnn.RoiPoolingConv import RoiPoolingConv [as 别名]
def classifier(base_layers, input_rois, num_rois, nb_classes, trainable=True):
    """
    The final classifier, built from InceptionV3-style mixed8-mixed10 blocks
    wrapped in TimeDistributed layers.
    NOTE:
    The Roipooling layer uses tensorflow's bilinear interpolation
    """
    # Channel axis is 4 because every tensor carries an additional
    # TimeDistributed (ROI) dimension in front of the spatial/channel axes.
    channel_axis = 4
    pooling_regions = 17  # tensorflow implementation
    pooled = RoiPoolingConv(pooling_regions, num_rois, trainable=trainable)([base_layers, input_rois])

    # mixed 8: 8 x 8 x 1280
    b3 = conv2d_bn_td(pooled, 192, 1, 1, trainable=trainable)
    b3 = conv2d_bn_td(b3, 320, 3, 3, strides=(2, 2), padding='valid', trainable=trainable)

    b7 = conv2d_bn_td(pooled, 192, 1, 1, trainable=trainable)
    b7 = conv2d_bn_td(b7, 192, 1, 7, trainable=trainable)
    b7 = conv2d_bn_td(b7, 192, 7, 1, trainable=trainable)
    b7 = conv2d_bn_td(b7, 192, 3, 3, strides=(2, 2), padding='valid', trainable=trainable)

    pool8 = TimeDistributed(MaxPooling2D((3, 3), strides=(2, 2), trainable=trainable),
                            trainable=trainable)(pooled)
    x = layers.concatenate([b3, b7, pool8], axis=channel_axis, name='mixed8')

    # mixed 9, 10: 8 x 8 x 2048
    for i in range(2):
        b1 = conv2d_bn_td(x, 320, 1, 1, trainable=trainable)

        stem3 = conv2d_bn_td(x, 384, 1, 1, trainable=trainable)
        split3_a = conv2d_bn_td(stem3, 384, 1, 3, trainable=trainable)
        split3_b = conv2d_bn_td(stem3, 384, 3, 1, trainable=trainable)
        merged3 = layers.concatenate([split3_a, split3_b], axis=channel_axis,
                                     name='mixed9_' + str(i))

        stem_dbl = conv2d_bn_td(x, 448, 1, 1, trainable=trainable)
        stem_dbl = conv2d_bn_td(stem_dbl, 384, 3, 3, trainable=trainable)
        split_dbl_a = conv2d_bn_td(stem_dbl, 384, 1, 3, trainable=trainable)
        split_dbl_b = conv2d_bn_td(stem_dbl, 384, 3, 1, trainable=trainable)
        merged_dbl = layers.concatenate([split_dbl_a, split_dbl_b], axis=channel_axis)

        avg = TimeDistributed(AveragePooling2D((3, 3), strides=(1, 1), padding='same',
                                               trainable=trainable),
                              trainable=trainable)(x)
        avg = conv2d_bn_td(avg, 192, 1, 1, trainable=trainable)

        x = layers.concatenate([b1, merged3, merged_dbl, avg],
                               axis=channel_axis, name='mixed' + str(9 + i))

    out = TimeDistributed(GlobalAveragePooling2D(trainable=trainable),
                          name='global_avg_pooling', trainable=trainable)(x)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero', trainable=trainable),
                                name='dense_class_{}'.format(nb_classes), trainable=trainable)(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero', trainable=trainable),
                               name='dense_regress_{}'.format(nb_classes), trainable=trainable)(out)
    return [out_class, out_regr]