本文整理汇总了Python中nn_plankton.log_loss函数的典型用法代码示例。如果您正苦于以下问题:Python log_loss函数的具体用法?Python log_loss怎么用?Python log_loss使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了log_loss函数的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: loss
def loss(y, t):
    """Training objective: multiclass log loss plus an elastic-net penalty.

    The data term is ``nn_plankton.log_loss(y, t)``; the penalty mixes the
    module-level ``L1`` and ``L2`` terms with mixing coefficient ``alpha``
    and overall strength ``reg_param`` (all captured from enclosing scope).
    """
    data_term = nn_plankton.log_loss(y, t)
    elastic_net = alpha * L1 + (1 - alpha) * L2
    return data_term + reg_param * elastic_net
示例2: loss
def loss(y, t):
    """Regularized objective: log loss of ``y`` vs ``t`` plus the
    ``lambda_reg``-scaled regularization term (both captured from the
    enclosing scope)."""
    penalty = lambda_reg * reg_term
    return nn_plankton.log_loss(y, t) + penalty
示例3: xrange
# Report the validation log loss of each model individually, then find the
# softmax-parameterized convex blend of their predictions that minimizes the
# validation log loss via L-BFGS.  (Python 2 syntax: print statements, xrange.)
# NOTE(review): relies on variables defined earlier in the file —
# predictions_list, predictions_stack, t_valid, n_models,
# valid_predictions_paths — presumably one prediction array per model.
print "Individual prediction errors"
# Per-model validation error; assumes each p is aligned with t_valid.
individual_prediction_errors = [utils.log_loss(p, t_valid) for p in predictions_list]
# Free the per-model list; predictions_stack is used from here on.
del predictions_list
for i in xrange(n_models):
    print individual_prediction_errors[i], os.path.basename(valid_predictions_paths[i])
print
# optimizing weights
# Stack of all model predictions, shape assumed (n_models, n_samples, n_classes)
# given the axis-0 reduction below — TODO confirm against the producer.
X = theano.shared(predictions_stack) # source predictions
t = theano.shared(utils.one_hot(t_valid)) # targets
# Unconstrained weight vector; softmax maps it onto the probability simplex,
# so the blend is always a convex combination regardless of W's raw values.
W = T.vector('W')
# Reshape softmax weights to (n_models, 1, 1) so they broadcast over
# the per-model prediction arrays.
s = T.nnet.softmax(W).reshape((W.shape[0], 1, 1))
weighted_avg_predictions = T.sum(X * s, axis=0) # T.tensordot(X, s, [[0], [0]])
error = nn_plankton.log_loss(weighted_avg_predictions, t)
grad = T.grad(error, W)
# Compile separate objective and gradient functions for the optimizer.
f = theano.function([W], error)
g = theano.function([W], grad)
# All-zero init => softmax gives uniform weights as the starting blend.
w_init = np.zeros(n_models, dtype=theano.config.floatX)
# L-BFGS-B with an analytic gradient (fprime=g); tight pgtol for convergence.
out, loss, _ = scipy.optimize.fmin_l_bfgs_b(f, w_init, fprime=g, pgtol=1e-09, epsilon=1e-08, maxfun=10000)
# Recover the normalized softmax weights from the raw optimizer output.
weights = np.exp(out)
weights /= weights.sum()
print 'Optimal weights'
for i in xrange(n_models):
    print weights[i], os.path.basename(valid_predictions_paths[i])
print