This article collects typical usage examples of the keras.metrics.binary_crossentropy method in Python. If you have been wondering what metrics.binary_crossentropy does and how to use it, the curated code samples below may help. You can also explore the other members of the keras.metrics module.

Six code examples of the metrics.binary_crossentropy method are shown below, ordered by popularity.
Example 1: vae_loss

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def vae_loss(self, x, x_decoded_mean):
    # original_dim, z_mean and z_log_var are captured from the enclosing model-building scope.
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    # KL divergence between the approximate posterior N(z_mean, exp(z_log_var)) and a unit Gaussian prior.
    kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
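For context, here is a minimal sketch of the kind of VAE this loss belongs to; the dense encoder/decoder, the layer sizes, and the sampling helper are illustrative assumptions modeled on the standard Keras VAE example, not part of the snippet above:

from keras import backend as K
from keras import metrics
from keras.layers import Input, Dense, Lambda
from keras.models import Model

original_dim = 784      # e.g. flattened 28x28 images
intermediate_dim = 256
latent_dim = 2

# Encoder: map the input to the mean and log-variance of the latent posterior.
x = Input(shape=(original_dim,))
h = Dense(intermediate_dim, activation='relu')(x)
z_mean = Dense(latent_dim)(h)
z_log_var = Dense(latent_dim)(h)

def sampling(args):
    # Reparameterization trick: z = mu + sigma * epsilon.
    z_mean, z_log_var = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], latent_dim))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon

z = Lambda(sampling, output_shape=(latent_dim,))([z_mean, z_log_var])

# Decoder: reconstruct the input from the sampled latent code.
h_decoded = Dense(intermediate_dim, activation='relu')(z)
x_decoded_mean = Dense(original_dim, activation='sigmoid')(h_decoded)

def vae_loss(x, x_decoded_mean):
    # The same loss as above, written as a plain closure over the model tensors.
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)

vae = Model(x, x_decoded_mean)
vae.compile(optimizer='rmsprop', loss=vae_loss)
# Train with the inputs as their own targets: vae.fit(x_train, x_train, ...)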
Example 2: vae_loss

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def vae_loss(self, x, x_decoded_mean_squash):
    # Flatten the image tensors so the cross-entropy is computed per pixel.
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    # Note: unlike Example 1, the KL term is averaged over the latent dimensions rather than summed.
    kl_loss = -0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example 3: vae_loss

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * \
        metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = -0.5 * K.mean(
        1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example 4: vae_loss

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def vae_loss(self, x, x_decoded_mean_squash, z_mean, z_log_var):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    img_rows, img_cols = self._img_rows, self._img_cols
    # generative or reconstruction loss
    xent_loss = img_rows * img_cols * \
        metrics.binary_crossentropy(x, x_decoded_mean_squash)
    # Kullback-Leibler divergence loss
    kl_loss = -0.5 * K.mean(
        1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
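Unlike Examples 1-3, this variant receives z_mean and z_log_var as explicit arguments instead of closing over them, which makes the loss testable in isolation. Since Keras only ever calls a compiled loss as loss(y_true, y_pred), the extra tensors must be bound beforehand; a minimal sketch of one way to do that (make_loss and the obj instance are hypothetical, not from the original code):

def make_loss(obj, z_mean, z_log_var):
    # Bind the latent tensors so Keras can invoke the result as loss(y_true, y_pred).
    def loss(y_true, y_pred):
        return obj.vae_loss(y_true, y_pred, z_mean, z_log_var)
    return loss

# vae.compile(optimizer='rmsprop', loss=make_loss(obj, z_mean, z_log_var))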
Example 5: xent

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def xent(y_true, y_pred):
    return binary_crossentropy(y_true, y_pred)
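A thin wrapper like this mainly exists to give the metric a short name in training logs. A minimal usage sketch (the toy classifier is an illustrative assumption):

from keras.layers import Dense
from keras.metrics import binary_crossentropy
from keras.models import Sequential

def xent(y_true, y_pred):
    return binary_crossentropy(y_true, y_pred)

# During fit(), progress output reports the metric under the short name "xent".
model = Sequential([Dense(1, activation='sigmoid', input_shape=(10,))])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=[xent])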
Example 6: get_net

# Required imports: from keras import metrics [as alias]
# Or: from keras.metrics import binary_crossentropy [as alias]
def get_net(input_shape=(CUBE_SIZE, CUBE_SIZE, CUBE_SIZE, 1), load_weight_path=None, features=False, mal=False) -> Model:
    # Keras 1.x API: border_mode/subsample correspond to padding/strides in Keras 2;
    # CUBE_SIZE, USE_DROPOUT and LEARN_RATE are module-level constants.
    inputs = Input(shape=input_shape, name="input_1")
    x = inputs
    x = AveragePooling3D(pool_size=(2, 1, 1), strides=(2, 1, 1), border_mode="same")(x)
    x = Convolution3D(64, 3, 3, 3, activation='relu', border_mode='same', name='conv1', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(1, 2, 2), strides=(1, 2, 2), border_mode='valid', name='pool1')(x)
    # 2nd layer group
    x = Convolution3D(128, 3, 3, 3, activation='relu', border_mode='same', name='conv2', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool2')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.3)(x)
    # 3rd layer group
    x = Convolution3D(256, 3, 3, 3, activation='relu', border_mode='same', name='conv3a', subsample=(1, 1, 1))(x)
    x = Convolution3D(256, 3, 3, 3, activation='relu', border_mode='same', name='conv3b', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool3')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.4)(x)
    # 4th layer group
    x = Convolution3D(512, 3, 3, 3, activation='relu', border_mode='same', name='conv4a', subsample=(1, 1, 1))(x)
    x = Convolution3D(512, 3, 3, 3, activation='relu', border_mode='same', name='conv4b', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool4')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.5)(x)
    last64 = Convolution3D(64, 2, 2, 2, activation="relu", name="last_64")(x)
    # Two heads: a sigmoid nodule classifier and a linear malignancy regressor.
    out_class = Convolution3D(1, 1, 1, 1, activation="sigmoid", name="out_class_last")(last64)
    out_class = Flatten(name="out_class")(out_class)
    out_malignancy = Convolution3D(1, 1, 1, 1, activation=None, name="out_malignancy_last")(last64)
    out_malignancy = Flatten(name="out_malignancy")(out_malignancy)
    model = Model(input=inputs, output=[out_class, out_malignancy])
    if load_weight_path is not None:
        model.load_weights(load_weight_path, by_name=False)
    model.compile(optimizer=SGD(lr=LEARN_RATE, momentum=0.9, nesterov=True),
                  loss={"out_class": "binary_crossentropy", "out_malignancy": mean_absolute_error},
                  metrics={"out_class": [binary_accuracy, binary_crossentropy], "out_malignancy": mean_absolute_error})
    if features:
        model = Model(input=inputs, output=[last64])
    model.summary(line_length=140)
    return model
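For reference, CUBE_SIZE, LEARN_RATE and USE_DROPOUT, along with the imported layers and metric functions, are defined elsewhere in the source project. A minimal usage sketch, assuming CUBE_SIZE = 32 and a Keras 1.x environment:

import numpy as np

# Build the two-headed network and run it on an all-zero dummy cube.
model = get_net(load_weight_path=None)
dummy_cube = np.zeros((1, CUBE_SIZE, CUBE_SIZE, CUBE_SIZE, 1), dtype=np.float32)
class_prob, malignancy = model.predict(dummy_cube)
# For a 32x32x32 input each head flattens to shape (1, 1):
# a nodule probability and a raw malignancy score.
print(class_prob.shape, malignancy.shape)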