本文整理汇总了Python中cntk.cross_entropy_with_softmax方法的典型用法代码示例。如果您正苦于以下问题:Python cntk.cross_entropy_with_softmax方法的具体用法?Python cntk.cross_entropy_with_softmax怎么用?Python cntk.cross_entropy_with_softmax使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块cntk的用法示例。
在下文中一共展示了cntk.cross_entropy_with_softmax方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _create_model_and_execute_test
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def _create_model_and_execute_test(params):
    """Train one minibatch of the same model in CNTK and in ngraph.

    Builds a CNTK network from the ``params`` dict, trains a single
    minibatch with CNTK SGD, imports the identical loss graph into ngraph
    and executes one update step there, then returns both loss values so
    the caller can compare them.
    """
    # --- build the CNTK model ---
    features = C.input_variable(params['input_dim'], np.float32)
    params['input_var'] = features
    params['act_fun'] = C.layers.blocks.identity
    params['init_fun'] = C.glorot_uniform()
    model = params['create_model'](params)

    labels = C.input_variable((params['label_dim']), np.float32)
    loss = C.cross_entropy_with_softmax(model, labels)
    metric = C.classification_error(model, labels)
    schedule = C.learning_rate_schedule(0.05, C.UnitType.minibatch)
    trainer = C.Trainer(model, (loss, metric),
                        [C.sgd(model.parameters, schedule)])

    batch_x, batch_y = _generate_random_sample(
        params['batch_size'],
        params['input_dim'],
        params['label_dim']
    )

    # --- import the same loss graph into ngraph ---
    ng_loss, placeholders = CNTKImporter(
        batch_size=params['batch_size']).import_model(loss)
    sgd_step = CommonSGDOptimizer(0.05).minimize(ng_loss, ng_loss.variables())
    transformer = ng.transformers.make_transformer()
    update_fun = transformer.computation([ng_loss, sgd_step], *placeholders)

    # --- one training step in CNTK ---
    trainer.train_minibatch({features: batch_x, labels: batch_y})
    cntk_ret = trainer.previous_minibatch_loss_average

    # --- same step in ngraph; ngraph expects the batch axis last ---
    batch_x = np.moveaxis(batch_x, 0, -1)
    batch_y = np.moveaxis(batch_y, 0, -1)
    ng_ret = update_fun(batch_x, batch_y)[0]

    return cntk_ret, ng_ret
示例2: categorical_crossentropy
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def categorical_crossentropy(target, output, from_logits=False, axis=-1):
    """Categorical cross-entropy between a target distribution and an output.

    When ``from_logits`` is true the loss is computed directly from
    unnormalized scores via CNTK's fused softmax+cross-entropy; otherwise
    ``output`` is treated as probabilities, renormalized, clipped, and the
    cross-entropy is taken explicitly.
    """
    # Here, unlike other backends, the tensors lack a batch dimension,
    # so shift the user-supplied axis by one.
    channel_axis = -1 if axis == -1 else axis - 1
    ndim = len(output.shape)
    axes = list(range(ndim))
    if channel_axis != -1 and channel_axis not in axes:
        raise ValueError(
            '{}{}{}'.format(
                'Unexpected channels axis {}. '.format(channel_axis),
                'Expected to be -1 or one of the axes of `output`, ',
                'which has {} dimensions.'.format(ndim)))
    # If the channels are not already the last axis, permute them there.
    if channel_axis != -1 and channel_axis != axes[-1]:
        perm = axes[:channel_axis] + axes[channel_axis + 1:] + [channel_axis]
        output = C.transpose(output, perm)
        target = C.transpose(target, perm)
    if from_logits:
        # cntk's result shape is (batch, 1), while keras expects (batch, )
        return C.reshape(C.cross_entropy_with_softmax(output, target), ())
    # scale preds so that the class probas of each sample sum to 1
    output /= C.reduce_sum(output, axis=-1)
    # avoid numerical instability with epsilon clipping
    output = C.clip(output, epsilon(), 1.0 - epsilon())
    return -sum(target * C.log(output), axis=-1)
示例3: categorical_crossentropy
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def categorical_crossentropy(target, output, from_logits=False):
    """Categorical cross-entropy of ``output`` against ``target``.

    With ``from_logits`` set, delegates to CNTK's fused
    softmax+cross-entropy; otherwise normalizes and clips the given
    probabilities before taking the cross-entropy directly.
    """
    if from_logits:
        ce = C.cross_entropy_with_softmax(output, target)
        # cntk's result shape is (batch, 1), while keras expects (batch, )
        return C.reshape(ce, ())
    # scale preds so that the class probas of each sample sum to 1
    output /= C.reduce_sum(output, axis=-1)
    # avoid numerical instability with epsilon clipping
    output = C.clip(output, epsilon(), 1.0 - epsilon())
    return -sum(target * C.log(output), axis=-1)
示例4: create_criterion_function_preferred
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def create_criterion_function_preferred(model, labels):
    """Return the (loss, error-metric) pair for ``model`` against ``labels``."""
    loss = C.cross_entropy_with_softmax(model, labels)
    metric = C.classification_error(model, labels)
    return loss, metric  # (model, labels) -> (loss, error metric)
示例5: create_criterion_function
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def create_criterion_function(model):
    """Build a combined (loss, metric) criterion with a label placeholder."""
    labels = C.placeholder(name='labels')
    loss = C.cross_entropy_with_softmax(model, labels)
    metric = C.classification_error(model, labels)
    # (features, labels) -> (loss, metric)
    return C.combine([loss, metric])
示例6: categorical_crossentropy
# 需要导入模块: import cntk [as 别名]
# 或者: from cntk import cross_entropy_with_softmax [as 别名]
def categorical_crossentropy(output, target, from_logits=False):
    """Categorical cross-entropy; note the (output, target) argument order.

    With ``from_logits`` set, uses CNTK's fused softmax+cross-entropy;
    otherwise renormalizes and clips the probabilities first.
    """
    if from_logits:
        ce = C.cross_entropy_with_softmax(output, target)
        # cntk's result shape is (batch, 1), while keras expects (batch, )
        return C.reshape(ce, ())
    # scale preds so that the class probas of each sample sum to 1
    output /= C.reduce_sum(output, axis=-1)
    # avoid numerical instability with _EPSILON clipping
    output = C.clip(output, _EPSILON, 1.0 - _EPSILON)
    return -sum(target * C.log(output), axis=-1)