本文整理汇总了Python中keras.backend.categorical_crossentropy方法的典型用法代码示例。如果您正苦于以下问题:Python backend.categorical_crossentropy方法的具体用法?Python backend.categorical_crossentropy怎么用?Python backend.categorical_crossentropy使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类keras.backend
的用法示例。
在下文中一共展示了backend.categorical_crossentropy方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: categorical_crossentropy_color
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def categorical_crossentropy_color(y_true, y_pred):
    """Per-pixel weighted categorical cross-entropy for image colorization.

    Parameters
    ----------
    y_true : keras tensor, shape (n, h, w, q)
        One-hot class targets; columns from index 313 onward carry a
        per-pixel rebalancing weight (so presumably q == 314 — TODO confirm
        against the data pipeline).
    y_pred : keras tensor, shape (n, h, w, q)
        Predicted per-pixel class distribution.

    Returns
    -------
    keras tensor
        Mean weighted cross-entropy over the flattened pixels.
    """
    # Flatten: collapse batch and spatial dims to one row per pixel.
    n, h, w, q = y_true.shape
    y_true = K.reshape(y_true, (n * h * w, q))
    y_pred = K.reshape(y_pred, (n * h * w, q))
    weights = y_true[:, 313:]  # extract weight from y_true
    # Tile the weight column across the 313 color-class columns.
    weights = K.concatenate([weights] * 313, axis=1)
    y_true = y_true[:, :-1]  # remove last column (the weight channel)
    y_pred = y_pred[:, :-1]  # remove last column
    # multiply y_true by weights
    y_true = y_true * weights
    # NOTE(review): (y_pred, y_true) is the legacy Keras 1 argument order
    # (output, target); Keras 2 expects (target, output) — confirm against
    # the Keras version this project pins.
    cross_ent = K.categorical_crossentropy(y_pred, y_true)
    cross_ent = K.mean(cross_ent, axis=-1)
    return cross_ent
示例2: gen_adv_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def gen_adv_loss(logits, y, loss='logloss', mean=False):
    """
    Generate the loss function.
    """
    if loss == 'training':
        # Target the model's own argmax prediction instead of the true
        # labels, which avoids label leaking at training time.
        hard_targets = K.equal(logits, K.max(logits, 1, keepdims=True))
        y = K.cast(hard_targets, "float32")
        y = y / K.sum(y, 1, keepdims=True)
    elif loss != 'logloss':
        raise ValueError("Unknown loss: {}".format(loss))
    # Both supported modes reduce to cross-entropy on raw logits.
    out = K.categorical_crossentropy(y, logits, from_logits=True)
    if mean:
        out = K.mean(out)
    # else:
    # out = K.sum(out)
    return out
示例3: gen_adv_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def gen_adv_loss(logits, y, loss='logloss', mean=False):
    """
    Generate the loss function.

    Parameters
    ----------
    logits : tensor
        Raw (pre-softmax) model outputs.
    y : tensor
        One-hot target labels (ignored and replaced by the model's own
        argmax prediction when ``loss == 'training'``).
    loss : str
        Either 'training' or 'logloss'; anything else raises ValueError.
    mean : bool
        If True, reduce the per-sample losses to their scalar mean.
    """
    if loss == 'training':
        # use the model's output instead of the true labels to avoid
        # label leaking at training time
        y = K.cast(K.equal(logits, K.max(logits, 1, keepdims=True)), "float32")
        y = y / K.sum(y, 1, keepdims=True)
        # NOTE(review): (logits, y) is the legacy Keras 1 argument order
        # (output, target); Keras 2 expects (target, output) — confirm
        # against the pinned Keras version.
        out = K.categorical_crossentropy(logits, y, from_logits=True)
    elif loss == 'logloss':
        # out = K.categorical_crossentropy(logits, y, from_logits=True)
        out = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y)
        out = tf.reduce_mean(out)
    else:
        raise ValueError("Unknown loss: {}".format(loss))
    if mean:
        # BUG FIX: tf.mean does not exist (AttributeError at runtime);
        # the intended reduction is tf.reduce_mean.
        out = tf.reduce_mean(out)
    # else:
    # out = K.sum(out)
    return out
示例4: build_3dcnn_model
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def build_3dcnn_model(self, fusion_type, Fusion):
    """Build and compile a single-stream 2D/3D CNN classifier.

    Parameters
    ----------
    fusion_type : str
        Tag used only in the saved architecture-plot filename.
    Fusion : sequence
        Modality descriptors; a single-character first entry selects the
        2D CNN backbone, otherwise the 3D CNN (presumably per-modality
        channel groupings — TODO confirm against the callers).

    Returns
    -------
    Compiled keras Model ending in a softmax over ``self.config.classes``.
    """
    if len(Fusion[0]) == 1:
        input_shape = (32, 32, len(Fusion))
        model_in,model = self.cnn_2D(input_shape)
    else:
        input_shape = (32, 32, 5, len(Fusion))
        model_in,model = self.cnn_3D(input_shape)
    model = Dropout(0.5)(model)
    model = Dense(32, activation='relu', name = 'fc2')(model)
    model = Dense(self.config.classes, activation='softmax', name = 'fc3')(model)
    # NOTE(review): input=/output= are the legacy Keras kwargs; newer
    # Keras requires inputs=/outputs= — confirm the pinned version.
    model = Model(input=model_in,output=model)
    # Parameter summary (disabled)
    # model.summary()
    plot_model(model,to_file='experiments/img/' + str(Fusion) + fusion_type + r'_model.png',show_shapes=True)
    print(' Saving model Architecture')
    adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)
    # model.compile(optimizer=adam, loss=self.mycrossentropy, metrics=['accuracy'])  # improved results but unstable
    model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
示例5: load_model_and_generate
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def load_model_and_generate(self, model_name='model7_laf', epochs=10):
    """Load a checkpointed model, recompile it, and generate fuzzed samples."""
    # One timestamped output directory per generation run.
    stamp = datetime.datetime.now().strftime('_date_%Y-%m-%d_%H-%M-%S')
    dir_name = ('./generated_results/pdfs/' + model_name + stamp
                + 'epochs_' + str(epochs) + '/')
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    checkpoint_path = ('./model_checkpoint/best_models/'
                       'model7_laf_date_2018-06-19_12-23-39_epoch_30_val_loss_0.8395.h5')
    # compile=False: the checkpoint is recompiled below with a fresh optimizer.
    model = load_model(checkpoint_path, compile=False)
    optimizer = Adam(lr=0.0001)  # Reduce from 0.001 to 0.0001 for model_10
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    seq = self.generate_and_fuzz_new_samples(model=model,
                                             model_name=model_name,
                                             epochs=epochs,
                                             current_epoch=10,
                                             dir_name=dir_name)
    return preprocess.get_list_of_object(seq=seq, is_sort=False)
示例6: load_model_and_generate
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def load_model_and_generate(self, model_name='model_7', epochs=38):
    """Load a checkpointed model, recompile it, and generate fuzzed samples."""
    # One timestamped output directory per generation run.
    stamp = datetime.datetime.now().strftime('_date_%Y-%m-%d_%H-%M-%S')
    dir_name = ('./generated_results/pdfs/' + model_name + stamp
                + 'epochs_' + str(epochs) + '/')
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    checkpoint_path = ('./model_checkpoint/best_models/'
                       'model_7_date_2018-05-14_21-44-21_epoch_38_val_loss_0.3300.h5')
    # compile=False: the checkpoint is recompiled below with a fresh optimizer.
    model = load_model(checkpoint_path, compile=False)
    optimizer = Adam(lr=0.001)  # Reduce from 0.001 to 0.0001 just for model_10
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    seq = self.generate_and_fuzz_new_samples(model=model,
                                             model_name=model_name,
                                             epochs=epochs,
                                             current_epoch=38,
                                             dir_name=dir_name)
    return preprocess.get_list_of_object(seq=seq, is_sort=False)
示例7: load_model_and_generate
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def load_model_and_generate(self, model_name='model7_laf', epochs=50):
    """Load a checkpointed model, recompile it, and generate fuzzed samples."""
    # One timestamped output directory per generation run.
    stamp = datetime.datetime.now().strftime('_date_%Y-%m-%d_%H-%M-%S')
    dir_name = ('./generated_results/pdfs/' + model_name + stamp
                + 'epochs_' + str(epochs) + '/')
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    checkpoint_path = ('./model_checkpoint/best_models/'
                       'model7_laf_date_2018-06-19_12-23-39_epoch_50_val_loss_0.7242.h5')
    # compile=False: the checkpoint is recompiled below with a fresh optimizer.
    model = load_model(checkpoint_path, compile=False)
    optimizer = Adam(lr=0.0001)  # Reduce from 0.001 to 0.0001 for model_10
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    seq = self.generate_and_fuzz_new_samples(model=model,
                                             model_name=model_name,
                                             epochs=epochs,
                                             current_epoch=50,
                                             dir_name=dir_name)
    return preprocess.get_list_of_object(seq=seq, is_sort=False)
示例8: augmented_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def augmented_loss(self, y_true, y_pred):
    """Cross-entropy augmented with a distillation-style KL term.

    The base loss is categorical cross-entropy on the softmax of
    ``y_pred``. The augmentation compares a temperature-softened
    distribution derived from embedding similarities of the true tokens
    against the softened prediction, scaled by
    ``self.gamma * self.temperature``.
    """
    _y_pred = Activation("softmax")(y_pred)
    # NOTE(review): (prediction, target) is the legacy Keras 1 argument
    # order; Keras 2 expects (target, output) — confirm the pinned version.
    loss = K.categorical_crossentropy(_y_pred, y_true)
    # y is (batch x seq x vocab)
    y_indexes = K.argmax(y_true, axis=2)  # turn one hot to index. (batch x seq)
    y_vectors = self.embedding(y_indexes)  # lookup the vector (batch x seq x vector_length)
    #v_length = self.setting.vector_length
    #y_vectors = K.reshape(y_vectors, (-1, v_length))
    #y_t = K.map_fn(lambda v: K.dot(self.embedding.embeddings, K.reshape(v, (-1, 1))), y_vectors)
    #y_t = K.squeeze(y_t, axis=2)  # unknown but necessary operation
    #y_t = K.reshape(y_t, (-1, self.sequence_size, self.vocab_size))
    # vector x embedding dot products (batch x seq x vocab)
    y_t = tf.tensordot(y_vectors, K.transpose(self.embedding.embeddings), 1)
    y_t = K.reshape(y_t, (-1, self.sequence_size, self.vocab_size))  # explicitly set shape
    # Soften both distributions with the same temperature before the KL.
    y_t = K.softmax(y_t / self.temperature)
    _y_pred_t = Activation("softmax")(y_pred / self.temperature)
    aug_loss = kullback_leibler_divergence(y_t, _y_pred_t)
    # Weight by gamma * T (presumably the usual distillation T^2-style
    # gradient scaling — confirm against the training setup).
    loss += (self.gamma * self.temperature) * aug_loss
    return loss
示例9: abstention_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def abstention_loss(y_true, y_pred):
    """ Function to compute abstention loss. It is composed by two terms: (i) original loss of the multiclass classification problem, (ii) cost associated to the abstaining samples.

    Parameters
    ----------
    y_true : keras tensor
        True values to predict
    y_pred : keras tensor
        Prediction made by the model. It is assumed that this keras tensor includes extra columns to store the abstaining classes.

    Notes
    -----
    Relies on module-level globals ``mask`` (selects the abstention
    column(s) of ``y_pred``) and ``mu`` (abstention penalty weight),
    which must be defined elsewhere in this module — verify they are in
    scope where this loss is registered.
    """
    cost = 0
    # Zero out the abstention column(s) so the base loss only sees real classes.
    base_pred = (1-mask)*y_pred
    base_true = y_true
    base_cost = K.categorical_crossentropy(base_true,base_pred)
    # Mean predicted abstention mass per sample.
    abs_pred = K.mean(mask*y_pred, axis=-1)
    # Down-weight the base loss by (1 - abstention) and add the
    # -mu*log(1 - abstention) penalty that discourages always abstaining.
    cost = (1.-abs_pred)*base_cost - mu*K.log(1.-abs_pred)
    return cost
示例10: gen_adv_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def gen_adv_loss(logits, y, loss='logloss', mean=False):
    """
    Generate the loss function.
    """
    if loss not in ('training', 'logloss'):
        raise ValueError("Unknown loss: {}".format(loss))
    if loss == 'training':
        # Target the model's own argmax prediction rather than the true
        # labels, avoiding label leaking at training time.
        hard_targets = K.equal(logits, K.max(logits, 1, keepdims=True))
        y = K.cast(hard_targets, "float32")
        y = y / K.sum(y, 1, keepdims=True)
    out = K.categorical_crossentropy(logits, y, from_logits=True)
    # Reduce to a scalar: mean when requested, otherwise sum.
    return K.mean(out) if mean else K.sum(out)
示例11: gen_grad_ens
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def gen_grad_ens(x, logits, y):
    """Sum cross-entropy losses over an ensemble's logits and return the
    total loss together with its gradient w.r.t. the input ``x``."""
    # Accumulate the per-model losses; slicing from index 1 makes the
    # original `if len(logits) >= 1` guard unnecessary (an empty slice
    # simply skips the loop).
    adv_loss = K.categorical_crossentropy(logits[0], y, from_logits=True)
    for member_logits in logits[1:]:
        adv_loss = adv_loss + K.categorical_crossentropy(member_logits, y, from_logits=True)
    grad = K.gradients(adv_loss, [x])[0]
    return adv_loss, grad
示例12: loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def loss(y_true, y_pred):
    """Plain categorical cross-entropy between one-hot targets and predictions."""
    return K.categorical_crossentropy(y_true, y_pred)
示例13: build_fusion_model
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def build_fusion_model(self, fusion_type, Fusion):
    """Build and compile a multi-branch fusion CNN classifier.

    One CNN branch is built per modality in ``Fusion`` (2D for
    single-character entries, 3D otherwise), then the branches are fused
    by ``self.nn_fusion`` into a single classifier head.
    """
    branch_models = []
    branch_inputs = []
    for modality in Fusion:
        if len(modality) == 1:
            branch_in, branch_model = self.cnn_2D((32, 32, 1), modality)
        else:
            branch_in, branch_model = self.cnn_3D((32, 32, 5, 1), modality)
        branch_models.append(branch_model)
        branch_inputs.append(branch_in)
    # Fuse all per-modality branches into one model.
    model = self.nn_fusion(branch_inputs, branch_models, self.config.classes, fusion_type)
    # Print the parameter summary.
    model.summary()
    plot_model(model,
               to_file='experiments/img/' + str(Fusion) + fusion_type + r'_model.png',
               show_shapes=True)
    print(' Saving model Architecture')
    adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)
    # self.mycrossentropy improved results but was unstable, so the stock
    # categorical cross-entropy is used instead.
    model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
示例14: mycrossentropy
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def mycrossentropy(self, y_true, y_pred):
    """Label-smoothed cross-entropy loss.

    Mixes the standard cross-entropy (weight 1-e) with cross-entropy
    against a constant 0.5 target (weight e), acting as a smoothing /
    regularization term.

    NOTE(review): (y_pred, y_true) is the legacy Keras 1 argument order
    (output, target); Keras 2 expects (target, output) — confirm against
    the pinned Keras version.
    """
    e = 0.3  # smoothing weight for the constant-target term
    # for i in range(y_true.shape[0]):
    # for j in range(3):
    # sum += 0.1*(-1**y_true(i,j))*exp(abs(np.argmax(y_true[i,:])-j))*log(y_pred(i,j))
    # return sum/len
    # y = np.argmax(y_true, axis=1)
    # y_ = np.argmax(y_pred, axis=1)
    # print '*****************',y_pred
    # Earlier variant smoothed toward (1-y_true)/(classes-1) instead:
    # return (1-e)*K.categorical_crossentropy(y_pred,y_true) - e*K.categorical_crossentropy(y_pred, (1-y_true)/(self.config.classes-1))
    return (1-e)*K.categorical_crossentropy(y_pred,y_true) + e*K.categorical_crossentropy(y_pred, K.ones_like(y_pred)/2)
示例15: truncated_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import categorical_crossentropy [as 别名]
def truncated_loss(y_true, y_pred):
    """Categorical cross-entropy restricted to the first VAL_MAXLEN timesteps.

    Relies on the module-level constant ``VAL_MAXLEN`` (defined elsewhere
    in this module). ``y_true`` / ``y_pred`` are indexed as
    (batch, time, classes) rank-3 tensors.
    """
    # Evaluate only the leading VAL_MAXLEN positions of each sequence.
    y_true = y_true[:, :VAL_MAXLEN, :]
    y_pred = y_pred[:, :VAL_MAXLEN, :]
    # NOTE(review): target=/output= keyword names match the Keras 2
    # backend signature — confirm against the pinned Keras version.
    loss = K.categorical_crossentropy(
        target=y_true, output=y_pred, from_logits=False)
    return K.mean(loss, axis=-1)