

Python functions.squared_error Function Code Examples

This article collects typical usage examples of the nnabla.functions.squared_error function in Python. If you are wondering what exactly squared_error does, how to call it, or what its usage looks like in practice, the curated examples below may help.


A total of 11 code examples of the squared_error function are shown below, sorted by popularity by default.
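Before the examples, here is a minimal self-contained sketch (not taken from any of the projects below) of the basic pattern: F.squared_error computes the elementwise squared difference of two variables, which is then reduced to a scalar loss. The shapes and random data are only illustrative.

import numpy as np
import nnabla as nn
import nnabla.functions as F

x0 = nn.Variable((4, 3))       # prediction
x1 = nn.Variable((4, 3))       # target
h = F.squared_error(x0, x1)    # elementwise (x0 - x1) ** 2, same shape as the inputs
loss = F.mean(h)               # reduce to a scalar loss

x0.d = np.random.randn(4, 3)
x1.d = np.random.randn(4, 3)
loss.forward()
print(loss.d)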

Example 1: sigma_regularization

def sigma_regularization(ctx, log_var, one):
    with nn.context_scope(ctx):
        h = F.exp(log_var)        # variance
        h = F.pow_scalar(h, 0.5)  # standard deviation (sigma)
        b = log_var.shape[0]      # batch size
        r = F.sum(F.squared_error(h, one)) / b  # average penalty for sigma deviating from 1
    return r
Author: kzky | Project: works | Lines: 7 | Source: cnn_model_040.py
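Most of the examples on this page, including Example 1 above, take a ctx argument and build their graphs inside nn.context_scope(ctx). The following is a minimal sketch of how such a context might be obtained and how Example 1 could be driven, assuming sigma_regularization from above is in scope; the 'cudnn' extension, device id, and shapes are assumptions (use 'cpu' without a GPU build).

import numpy as np
import nnabla as nn
from nnabla.ext_utils import get_extension_context

ctx = get_extension_context('cudnn', device_id='0')  # or get_extension_context('cpu')

log_var = nn.Variable((8, 1))   # predicted log-variance
one = nn.Variable((8, 1))       # constant target of 1.0 for the sigmas
log_var.d.fill(0.0)
one.d.fill(1.0)

r = sigma_regularization(ctx, log_var, one)  # builds the graph under ctx
r.forward()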

Example 2: sigma_regularization

def sigma_regularization(ctx, log_var, one):
    with nn.context_scope(ctx):
        h = F.exp(log_var)
        h = F.pow_scalar(h, 0.5)
        h = F.mean(h, axis=1)
        r = F.mean(F.squared_error(h, one))
    return r
Author: kzky | Project: works | Lines: 7 | Source: cnn_model_042.py

Example 3: sr_loss_with_uncertainty

def sr_loss_with_uncertainty(ctx, pred0, pred1, log_var0, log_var1):
    # TODO: squared error/absolute error
    s0 = F.exp(log_var0)  # variance of branch 0
    s1 = F.exp(log_var1)  # variance of branch 1
    squared_error = F.squared_error(pred0, pred1)
    with nn.context_scope(ctx):
        # symmetric uncertainty-weighted consistency loss between the two predictions
        loss_sr = F.mean(squared_error * (1 / s0 + 1 / s1) + (s0 / s1 + s1 / s0)) * 0.5
    return loss_sr
Author: kzky | Project: works | Lines: 8 | Source: cnn_model_060.py

Example 4: sigmas_regularization

def sigmas_regularization(ctx, log_var0, log_var1):
    with nn.context_scope(ctx):
        h0 = F.exp(log_var0)
        h0 = F.pow_scalar(h0, 0.5)
        h1 = F.exp(log_var1)
        h1 = F.pow_scalar(h1, 0.5)
        r = F.mean(F.squared_error(h0, h1))
    return r
Author: kzky | Project: works | Lines: 8 | Source: cnn_model_060.py

Example 5: mnist_lenet_siamese

def mnist_lenet_siamese(x0, x1, test=False):
    """"""
    h0 = mnist_lenet_feature(x0, test)
    h1 = mnist_lenet_feature(x1, test)  # share weights
    # h = (h0 - h1) ** 2 # equivalent
    h = F.squared_error(h0, h1)
    p = F.sum(h, axis=1)
    return p
Author: zwsong | Project: nnabla | Lines: 8 | Source: siamese.py
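The "share weights" comment in Example 5 relies on nnabla registering parameters by name: calling the same parametric functions for both branches reuses the same weights. Below is a minimal sketch of that pattern with a toy feature extractor; toy_feature is a stand-in for illustration, not the mnist_lenet_feature used in the example.

import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF

def toy_feature(x):
    # Parameters are registered by name ('fc1', 'fc2'), so calling this for
    # both branches reuses (shares) the same weights.
    h = F.relu(PF.affine(x, 32, name='fc1'))
    return PF.affine(h, 8, name='fc2')

x0 = nn.Variable((16, 1, 28, 28))
x1 = nn.Variable((16, 1, 28, 28))
dist = F.sum(F.squared_error(toy_feature(x0), toy_feature(x1)), axis=1)  # per-sample squared distance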

Example 6: sr_loss_with_uncertainty

def sr_loss_with_uncertainty(ctx, pred0, pred1, log_var0, log_var1):
    var0 = F.exp(log_var0)
    var1 = F.exp(log_var1)
    s0 = F.pow_scalar(var0, 0.5)  # sigma of branch 0
    s1 = F.pow_scalar(var1, 0.5)  # sigma of branch 1
    squared_error = F.squared_error(pred0, pred1)
    with nn.context_scope(ctx):
        # KL-divergence-style penalty between N(pred0, var0) and N(pred1, var1)
        loss = F.log(s1/s0) + (var0/var1 + squared_error/var1) * 0.5
        loss_sr = F.mean(loss)
    return loss_sr
Author: kzky | Project: works | Lines: 10 | Source: cnn_model_079.py

Example 7: sr_loss_with_uncertainty

def sr_loss_with_uncertainty(ctx, pred0, pred1, log_v0, log_v1, 
                             log_s0, log_s1):
    v0 = F.exp(log_v0)
    v1 = F.exp(log_v1)
    squared_error = F.squared_error(pred0, pred1)
    s0 = F.exp(log_s0)
    s1 = F.exp(log_s1)
    with nn.context_scope(ctx):
        error = squared_error * (1 / v0 + 1 / v1) + (v0 / v1 + v1 / v0) + (s0 / s1 + s1 / s0)
        loss_sr = F.mean(error) * 0.5
    return loss_sr
Author: kzky | Project: works | Lines: 11 | Source: cnn_model_050.py

Example 8: sr_loss_with_uncertainty_and_coef

def sr_loss_with_uncertainty_and_coef(ctx, pred0, pred1, log_var0, log_var1):
    c0 = srwu_learned_coef(ctx, log_var0)
    c1 = srwu_learned_coef(ctx, log_var1)
    sc0 = sigmas_learned_coef(ctx, log_var0, log_var1)
    sc1 = sigmas_learned_coef(ctx, log_var1, log_var0)
    # Treat the learned coefficients as constants: no gradient flows through them.
    c0.need_grad = False
    c1.need_grad = False
    sc0.need_grad = False
    sc1.need_grad = False

    #TODO: squared error/absolute error
    s0 = F.exp(log_var0)
    s1 = F.exp(log_var1)
    squared_error = F.squared_error(pred0, pred1)
    with nn.context_scope(ctx):
        loss_sr = F.mean(
            squared_error * (c0 / s0 + c1 / s1) + (sc0 * s0 / s1 + sc1 * s1 / s0)) * 0.5
    return loss_sr
Author: kzky | Project: works | Lines: 18 | Source: cnn_model_055.py

Example 9: sr_loss

def sr_loss(ctx, pred0, pred1):
    with nn.context_scope(ctx):
        loss_sr = F.mean(F.squared_error(pred0, pred1))
    return loss_sr
Author: kzky | Project: works | Lines: 4 | Source: cnn_model_060.py

Example 10: sr_loss

def sr_loss(ctx, pred0, pred1):
    with nn.context_scope(ctx):
        pred_x_u0 = F.softmax(pred0)
        pred_x_u1 = F.softmax(pred1)
        loss_sr = F.mean(F.squared_error(pred_x_u0, pred_x_u1))
    return loss_sr
Author: kzky | Project: works | Lines: 6 | Source: cnn_model_005_001.py

Example 11: recon_loss

def recon_loss(ctx, pred, x_l):
    with nn.context_scope(ctx):
        loss_recon = F.mean(F.squared_error(pred, x_l))
    return loss_recon
Author: kzky | Project: works | Lines: 4 | Source: cnn_ae_model_000.py
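As a rough illustration of how a loss like Example 11's recon_loss would typically be minimized, here is a sketch of a training step with an nnabla solver, assuming recon_loss from above is in scope. The single affine "decoder", the Adam hyperparameters, the shapes, and the random data are assumptions for illustration, not part of the original project.

import numpy as np
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.solvers as S
from nnabla.ext_utils import get_extension_context

ctx = get_extension_context('cpu')
x_l = nn.Variable((32, 64))                 # input batch
pred = PF.affine(x_l, 64, name='decoder')   # stand-in for a real autoencoder output
loss = recon_loss(ctx, pred, x_l)

solver = S.Adam(alpha=1e-3)
solver.set_parameters(nn.get_parameters())

for _ in range(100):
    x_l.d = np.random.randn(32, 64)
    loss.forward()
    solver.zero_grad()
    loss.backward()
    solver.update()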


Note: The nnabla.functions.squared_error examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and distribution or use should follow the license of the corresponding project. Please do not reproduce without permission.