

Python GaussianProcessRegressor.log_marginal_likelihood Method Code Examples

This article collects typical usage examples of the Python method sklearn.gaussian_process.GaussianProcessRegressor.log_marginal_likelihood. If you are wondering what GaussianProcessRegressor.log_marginal_likelihood does or how to use it, the curated code examples below should help. You can also explore further usage examples of sklearn.gaussian_process.GaussianProcessRegressor, the class this method belongs to.


Below are 15 code examples of the GaussianProcessRegressor.log_marginal_likelihood method, sorted by popularity by default.
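Before the collected examples, here is a minimal, self-contained sketch of the method's calling conventions (the toy data and RBF kernel are illustrative assumptions, not taken from any project below): calling with no argument returns the log marginal likelihood stored for the optimized hyperparameters; passing an explicit theta evaluates the LML at those hyperparameters (on a log scale); and eval_gradient=True additionally returns the gradient with respect to theta.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

# Toy 1-D regression data (illustrative only)
rng = np.random.RandomState(0)
X = rng.uniform(0, 5, 20)[:, np.newaxis]
y = np.sin(X).ravel()

gpr = GaussianProcessRegressor(kernel=RBF(length_scale=1.0)).fit(X, y)

# No argument: LML at the optimized hyperparameters (gpr.kernel_.theta)
print(gpr.log_marginal_likelihood())
# Explicit theta: hyperparameters are passed on a log scale
print(gpr.log_marginal_likelihood(np.log([0.5])))
# eval_gradient=True: also return the gradient w.r.t. theta
lml, lml_grad = gpr.log_marginal_likelihood(gpr.kernel_.theta, eval_gradient=True)
print(lml, lml_grad)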

Example 1: test_lml_improving

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_lml_improving():
    """ Test that hyperparameter-tuning improves log-marginal likelihood. """
    for kernel in kernels:
        if kernel == fixed_kernel: continue
        gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
        assert_greater(gpr.log_marginal_likelihood(gpr.kernel_.theta),
                       gpr.log_marginal_likelihood(kernel.theta))
Author: AlexanderFabisch; Project: scikit-learn; Lines: 9; Source: test_gpr.py

Example 2: test_lml_gradient

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_lml_gradient():
    """ Compare analytic and numeric gradient of log marginal likelihood. """
    for kernel in kernels:
        gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)

        lml, lml_gradient = gpr.log_marginal_likelihood(kernel.theta, True)
        lml_gradient_approx = approx_fprime(
            kernel.theta, lambda theta: gpr.log_marginal_likelihood(theta, False), 1e-10
        )

        assert_almost_equal(lml_gradient, lml_gradient_approx, 3)
Author: Coding-dolphin; Project: scikit-learn; Lines: 13; Source: test_gpr.py

Example 3: test_converged_to_local_maximum

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_converged_to_local_maximum(kernel):
    # Test that we are in local maximum after hyperparameter-optimization.
    gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)

    lml, lml_gradient = \
        gpr.log_marginal_likelihood(gpr.kernel_.theta, True)

    assert_true(np.all((np.abs(lml_gradient) < 1e-4) |
                       (gpr.kernel_.theta == gpr.kernel_.bounds[:, 0]) |
                       (gpr.kernel_.theta == gpr.kernel_.bounds[:, 1])))
Author: jerry-dumblauskas; Project: scikit-learn; Lines: 12; Source: test_gpr.py

Example 4: test_custom_optimizer

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_custom_optimizer():
    """ Test that GPR can use externally defined optimizers. """
    # Define a dummy optimizer that simply tests 50 random hyperparameters
    def optimizer(obj_func, initial_theta, bounds):
        rng = np.random.RandomState(0)
        theta_opt, func_min = initial_theta, obj_func(initial_theta, eval_gradient=False)
        for _ in range(50):
            theta = np.atleast_1d(rng.uniform(np.maximum(-2, bounds[:, 0]), np.minimum(1, bounds[:, 1])))
            f = obj_func(theta, eval_gradient=False)
            if f < func_min:
                theta_opt, func_min = theta, f
        return theta_opt, func_min

    for kernel in kernels:
        if kernel == fixed_kernel:
            continue
        gpr = GaussianProcessRegressor(kernel=kernel, optimizer=optimizer)
        gpr.fit(X, y)
        # Checks that optimizer improved marginal likelihood
        assert_greater(gpr.log_marginal_likelihood(gpr.kernel_.theta), gpr.log_marginal_likelihood(gpr.kernel.theta))
Author: Coding-dolphin; Project: scikit-learn; Lines: 22; Source: test_gpr.py

Example 5: test_random_starts

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_random_starts():
    # Test that an increasing number of random-starts of GP fitting only
    # increases the log marginal likelihood of the chosen theta.
    n_samples, n_features = 25, 2
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features) * 2 - 1
    y = np.sin(X).sum(axis=1) + np.sin(3 * X).sum(axis=1) \
        + rng.normal(scale=0.1, size=n_samples)

    kernel = C(1.0, (1e-2, 1e2)) \
        * RBF(length_scale=[1.0] * n_features,
              length_scale_bounds=[(1e-4, 1e+2)] * n_features) \
        + WhiteKernel(noise_level=1e-5, noise_level_bounds=(1e-5, 1e1))
    last_lml = -np.inf
    for n_restarts_optimizer in range(5):
        gp = GaussianProcessRegressor(
            kernel=kernel, n_restarts_optimizer=n_restarts_optimizer,
            random_state=0,).fit(X, y)
        lml = gp.log_marginal_likelihood(gp.kernel_.theta)
        assert_greater(lml, last_lml - np.finfo(np.float32).eps)
        last_lml = lml
Author: jerry-dumblauskas; Project: scikit-learn; Lines: 23; Source: test_gpr.py

Example 6: RBF

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
plt.figure(0)
kernel = 1.0 * RBF(length_scale=100.0, length_scale_bounds=(1e-2, 1e3)) \
    + WhiteKernel(noise_level=1, noise_level_bounds=(1e-10, 1e+1))
gp = GaussianProcessRegressor(kernel=kernel,
                              alpha=0.0).fit(X, y)
X_ = np.linspace(0, 5, 100)
y_mean, y_cov = gp.predict(X_[:, np.newaxis], return_cov=True)
plt.plot(X_, y_mean, 'k', lw=3, zorder=9)
plt.fill_between(X_, y_mean - np.sqrt(np.diag(y_cov)),
                 y_mean + np.sqrt(np.diag(y_cov)),
                 alpha=0.5, color='k')
plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
plt.title("Initial: %s\nOptimum: %s\nLog-Marginal-Likelihood: %s"
          % (kernel, gp.kernel_,
             gp.log_marginal_likelihood(gp.kernel_.theta)))
plt.tight_layout()

# Second run
plt.figure(1)
kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e3)) \
    + WhiteKernel(noise_level=1e-5, noise_level_bounds=(1e-10, 1e+1))
gp = GaussianProcessRegressor(kernel=kernel,
                              alpha=0.0).fit(X, y)
X_ = np.linspace(0, 5, 100)
y_mean, y_cov = gp.predict(X_[:, np.newaxis], return_cov=True)
plt.plot(X_, y_mean, 'k', lw=3, zorder=9)
plt.fill_between(X_, y_mean - np.sqrt(np.diag(y_cov)),
                 y_mean + np.sqrt(np.diag(y_cov)),
                 alpha=0.5, color='k')
plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
Author: 0664j35t3r; Project: scikit-learn; Lines: 33; Source: plot_gpr_noisy.py

Example 7: C

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
# Specify stationary and non-stationary kernel
kernel_matern = C(1.0, (1e-10, 1000)) \
    * Matern(length_scale_bounds=(1e-1, 1e3), nu=1.5)
gp_matern = GaussianProcessRegressor(kernel=kernel_matern)

kernel_lls = C(1.0, (1e-10, 1000)) \
    * LocalLengthScalesKernel.construct(X, l_L=0.1, l_U=2.0, l_samples=5)
gp_lls = GaussianProcessRegressor(kernel=kernel_lls, optimizer=de_optimizer)

# Fit GPs
gp_matern.fit(X, y)
gp_lls.fit(X, y)

print "Learned kernel Matern: %s" % gp_matern.kernel_
print "Log-marginal-likelihood Matern: %s" \
    % gp_matern.log_marginal_likelihood(gp_matern.kernel_.theta)


print "Learned kernel LLS: %s" % gp_lls.kernel_
print "Log-marginal-likelihood LLS: %s" \
    % gp_lls.log_marginal_likelihood(gp_lls.kernel_.theta)

# Compute GP mean and standard deviation on test data
X_ = np.linspace(-1, 1, 500)

y_mean_lls, y_std_lls = gp_lls.predict(X_[:, np.newaxis], return_std=True)
y_mean_matern, y_std_matern = \
    gp_matern.predict(X_[:, np.newaxis], return_std=True)

plt.figure(figsize=(7, 7))
plt.subplot(2, 1, 1)
Author: jmetzen; Project: gp_extras; Lines: 33; Source: plot_gpr_lls.py

Example 8: GaussianProcessRegressor

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
print('after kernel init')

# train the model
gp_test = GaussianProcessRegressor(kernel=kernel_test, alpha=8, normalize_y=True)

print('after GP regressor')
# print("GPML kernel: %s" % gp_test.kernel_)
# print("Log-marginal-likelihood: %.3f"
#       % gp_test.log_marginal_likelihood(gp_test.kernel_.theta))

gp_test.fit(XT, y)

print("GPML kernel: %s" % gp_test.kernel_)
print("Log-marginal-likelihood: %.3f"
      % gp_test.log_marginal_likelihood(gp_test.kernel_.theta))


X_ = []
for i in range(15):
    X_.append([i+0.5, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1])
XT_ = scaler.transform(X_)
print('X_ ', XT_)
y_pred, y_std = gp_test.predict(XT_, return_std=True)

# Plot the predict result
X = np.array(X)
y = np.array(y)
X_ = np.array(X_)
plt.scatter(X[:, 0], y, c='k')
plt.plot(X_[:, 0], y_pred)
Author: chenmin1107; Project: scikit-learn; Lines: 32; Source: test_SquareExpBool_kernel.py

Example 9: RBF

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
k2 = 2.4**2 * RBF(length_scale=90.0) \
    * ExpSineSquared(length_scale=1.3, periodicity=1.0)  # seasonal component
# medium term irregularity
k3 = 0.66**2 \
    * RationalQuadratic(length_scale=1.2, alpha=0.78)
k4 = 0.18**2 * RBF(length_scale=0.134) \
    + WhiteKernel(noise_level=0.19**2)  # noise terms
kernel_gpml = k1 + k2 + k3 + k4

gp = GaussianProcessRegressor(kernel=kernel_gpml, alpha=0,
                              optimizer=None, normalize_y=True)
gp.fit(X, y)

print("GPML kernel: %s" % gp.kernel_)
print("Log-marginal-likelihood: %.3f"
      % gp.log_marginal_likelihood(gp.kernel_.theta))

# Kernel with optimized parameters
k1 = 50.0**2 * RBF(length_scale=50.0)  # long term smooth rising trend
k2 = 2.0**2 * RBF(length_scale=100.0) \
    * ExpSineSquared(length_scale=1.0, periodicity=1.0,
                     periodicity_bounds="fixed")  # seasonal component
# medium term irregularities
k3 = 0.5**2 * RationalQuadratic(length_scale=1.0, alpha=1.0)
k4 = 0.1**2 * RBF(length_scale=0.1) \
    + WhiteKernel(noise_level=0.1**2,
                  noise_level_bounds=(1e-3, np.inf))  # noise terms
kernel = k1 + k2 + k3 + k4

gp = GaussianProcessRegressor(kernel=kernel, alpha=0,
                              normalize_y=True)
Author: allefpablo; Project: scikit-learn; Lines: 33; Source: plot_gpr_co2.py

Example 10:

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
    plt.plot(X_, y_samples, lw=1)
    plt.xlim(0, 5)
    plt.ylim(-3, 3)
    plt.title("Prior (kernel:  %s)" % kernel, fontsize=12)

    # Generate data and fit GP
    rng = np.random.RandomState(4)
    X = rng.uniform(0, 5, 10)[:, np.newaxis]
    y = np.sin((X[:, 0] - 2.5) ** 2)
    gp.fit(X, y)

    # Plot posterior
    plt.subplot(2, 1, 2)
    X_ = np.linspace(0, 5, 100)
    y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
    plt.plot(X_, y_mean, "k", lw=3, zorder=9)
    plt.fill_between(X_, y_mean - y_std, y_mean + y_std, alpha=0.5, color="k")

    y_samples = gp.sample_y(X_[:, np.newaxis], 10)
    plt.plot(X_, y_samples, lw=1)
    plt.scatter(X[:, 0], y, c="r", s=50, zorder=10)
    plt.xlim(0, 5)
    plt.ylim(-3, 3)
    plt.title(
        "Posterior (kernel: %s)\n Log-Likelihood: %.3f" % (gp.kernel_, gp.log_marginal_likelihood(gp.kernel_.theta)),
        fontsize=12,
    )
    plt.tight_layout()

plt.show()
Author: Claire-Ling-Liu; Project: scikit-learn; Lines: 32; Source: plot_gpr_prior_posterior.py

Example 11: f

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def f(X):
    # target function is just a linear relationship + heteroscedastic noise
    return X + 0.5*np.random.multivariate_normal(np.zeros(X.shape[0]),
                                                 np.diag(X**2), 1)[0]

X = np.random.uniform(-7.5, 7.5, n_samples)  # input data
y = f(X)  # Generate target values by applying function to manifold

# Gaussian Process with RBF kernel and homoscedastic noise level
kernel_homo = C(1.0, (1e-10, 1000)) * RBF(1, (0.01, 100.0)) \
    + WhiteKernel(1e-3, (1e-10, 50.0))
gp_homoscedastic = GaussianProcessRegressor(kernel=kernel_homo, alpha=0)
gp_homoscedastic.fit(X[:, np.newaxis], y)
print "Homoscedastic kernel: %s" % gp_homoscedastic.kernel_
print "Homoscedastic LML: %.3f" \
    % gp_homoscedastic.log_marginal_likelihood(gp_homoscedastic.kernel_.theta)
print

# Gaussian Process with RBF kernel and heteroscedastic noise level
prototypes = KMeans(n_clusters=10).fit(X[:, np.newaxis]).cluster_centers_
kernel_hetero = C(1.0, (1e-10, 1000)) * RBF(1, (0.01, 100.0)) \
    + HeteroscedasticKernel.construct(prototypes, 1e-3, (1e-10, 50.0),
                                      gamma=5.0, gamma_bounds="fixed")
gp_heteroscedastic = GaussianProcessRegressor(kernel=kernel_hetero, alpha=0)
gp_heteroscedastic.fit(X[:, np.newaxis], y)
print "Heteroscedastic kernel: %s" % gp_heteroscedastic.kernel_
print "Heteroscedastic LML: %.3f" \
    % gp_heteroscedastic.log_marginal_likelihood(gp_heteroscedastic.kernel_.theta)


# Plot result
Author: AlexanderFabisch; Project: gp_extras; Lines: 33; Source: plot_gpr_heteroscedastic_noise.py

Example 12: test_lml_improving

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_lml_improving(kernel):
    # Test that hyperparameter-tuning improves log-marginal likelihood.
    gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
    assert_greater(gpr.log_marginal_likelihood(gpr.kernel_.theta),
                   gpr.log_marginal_likelihood(kernel.theta))
Author: jerry-dumblauskas; Project: scikit-learn; Lines: 7; Source: test_gpr.py

Example 13: plot

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def plot(df, options):

    UNIQ_GROUPS = df.group.unique()
    UNIQ_GROUPS.sort()

    sns.set_style("white")
    grppal = sns.color_palette("Set2", len(UNIQ_GROUPS))

    print('# UNIQ GROUPS', UNIQ_GROUPS)

    cent_stats = df.groupby(['position', 'group']).apply(stats_per_group)
    cent_stats.reset_index(inplace=True)
    print(cent_stats)

    import time
    from sklearn import preprocessing
    from sklearn.gaussian_process import GaussianProcessRegressor
    from sklearn.gaussian_process.kernels import Matern, WhiteKernel, ExpSineSquared, ConstantKernel, RBF

    mean = cent_stats['values'].mean()

    # kernel = ConstantKernel() + Matern(length_scale=mean, nu=3 / 2) + \
        # WhiteKernel(noise_level=1e-10)

    kernel = 1**2 * Matern(length_scale=1, nu=1.5) + \
        WhiteKernel(noise_level=0.1)

    figure = plt.figure(figsize=(10, 6))

    palette = sns.color_palette('muted')
    for i, GRP in enumerate(UNIQ_GROUPS):
        groupDf = cent_stats[cent_stats['group'] == GRP]
        X = groupDf['position'].values.reshape((-1, 1))

        y = groupDf['values'].values.reshape((-1, 1))
        y = preprocessing.scale(y)
        
        N = groupDf['subj_count'].values.max()

        # sns.lmplot(x="position", y="values", row="group",
        #            fit_reg=False, data=groupDf)

        stime = time.time()
        gp = GaussianProcessRegressor(kernel=kernel, normalize_y=True)
        gp.fit(X, y)
        print(gp.kernel_)
        print(gp.log_marginal_likelihood())

        print("Time for GPR fitting: %.3f" % (time.time() - stime))

        stime = time.time()

        pred_x = np.linspace(0, 30, 100)
        y_mean, y_std = gp.predict(pred_x.reshape((-1, 1)), return_std=True)
        y_mean = y_mean[:, 0]
        print("Time for GPR prediction: %.3f" % (time.time() - stime))

        group_color = palette[i]

        ci = y_std / math.sqrt(N) * 1.96
        plt.scatter(X, y, color=group_color, alpha=0.1)
        plt.plot(pred_x, y_mean, color=group_color)
        plt.fill_between(pred_x, y_mean - ci, y_mean +
                         ci, color=group_color, alpha=0.3)

    if options.title:
        plt.suptitle(options.title)

    if options.output:
        plt.savefig(options.output, dpi=150)

    if options.is_show:
        plt.show()
Author: sinkpoint; Project: sagit; Lines: 75; Source: fiber_stats_gp.py

Example 14: test_lml_precomputed

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
def test_lml_precomputed():
    """ Test that lml of optimized kernel is stored correctly. """
    for kernel in kernels:
        gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
        assert_equal(gpr.log_marginal_likelihood(gpr.kernel_.theta),
                     gpr.log_marginal_likelihood())
Author: AlexanderFabisch; Project: scikit-learn; Lines: 8; Source: test_gpr.py

Example 15: C

# Required import: from sklearn.gaussian_process import GaussianProcessRegressor [as alias]
# Or: from sklearn.gaussian_process.GaussianProcessRegressor import log_marginal_likelihood [as alias]
architecture=((n_features, n_hidden, n_dim_manifold),)
kernel_nn = C(1.0, (1e-10, 100)) \
    * ManifoldKernel.construct(base_kernel=RBF(0.1, (1.0, 100.0)),
                               architecture=architecture,
                               transfer_fct="tanh", max_nn_weight=1.0) \
    + WhiteKernel(1e-3, (1e-10, 1e-1))
gp_nn = GaussianProcessRegressor(kernel=kernel_nn, alpha=0,
                                 n_restarts_optimizer=3)

# Fit GPs and create scatter plot on test data
gp.fit(X, y)
gp_nn.fit(X, y)

print "Initial kernel: %s" % gp_nn.kernel
print "Log-marginal-likelihood: %s" \
    % gp_nn.log_marginal_likelihood(gp_nn.kernel.theta)

print "Learned kernel: %s" % gp_nn.kernel_
print "Log-marginal-likelihood: %s" \
    % gp_nn.log_marginal_likelihood(gp_nn.kernel_.theta)

X_test_ = np.random.uniform(-5, 5, (1000, n_dim_manifold))
X_nn_test = X_test_.dot(A)
y_test = f(X_test_)
plt.figure(figsize=(8, 6))
plt.subplot(1, 2, 1)
plt.scatter(y_test, gp.predict(X_nn_test), c='b', label="GP RBF")
plt.scatter(y_test, gp_nn.predict(X_nn_test), c='r', label="GP NN")
plt.xlabel("True")
plt.ylabel("Predicted")
plt.legend(loc=0)
Author: AlexanderFabisch; Project: gp_extras; Lines: 33; Source: plot_gpr_manifold.py


Note: The sklearn.gaussian_process.GaussianProcessRegressor.log_marginal_likelihood examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.