

Python learners.SubgradientSSVM Class Code Examples

This article collects typical usage examples of the pystruct.learners.SubgradientSSVM class in Python. If you have been wondering what the SubgradientSSVM class does, how to use it, or where to find working examples, the hand-picked class code examples below may help.


The following shows 15 code examples of the SubgradientSSVM class, ordered by popularity by default.
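As a quick orientation before the examples, the sketch below shows the basic pattern they all share: construct a pystruct model, wrap it in SubgradientSSVM, then call fit/predict/score. The hyperparameters shown (C, max_iter, learning_rate, momentum) recur throughout the examples below; the concrete values and the generate_blocks_multinomial toy data are illustrative choices, not a recommendation.

import numpy as np

from pystruct.models import GridCRF
from pystruct.learners import SubgradientSSVM
from pystruct.datasets import generate_blocks_multinomial

# toy multinomial grid-labeling data, as used in several of the examples below
X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
crf = GridCRF(n_states=len(np.unique(Y)))

# subgradient-based structured SVM: regularizer C, step size and momentum
clf = SubgradientSSVM(model=crf, max_iter=50, C=10, learning_rate=0.001,
                      momentum=0.98)
clf.fit(X, Y)
Y_pred = clf.predict(X)
print(clf.score(X, Y))  # average accuracy of predict(X) against Y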

Example 1: test_multinomial_checker_subgradient

def test_multinomial_checker_subgradient():
    X, Y = generate_checker_multinomial(n_samples=10, noise=0.4)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: martinsch, Project: coulomb_ssvm, Lines: 8, Source: test_subgradient_svm.py

Example 2: test_binary_blocks_subgradient

def test_binary_blocks_subgradient():
    #testing subgradient ssvm on easy binary dataset
    X, Y = toy.generate_blocks(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=200, C=100, learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: abhijitbendale, Project: pystruct, Lines: 8, Source: test_binary_grid.py

Example 3: test_binary_blocks

def test_binary_blocks():
    #testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: martinsch, Project: coulomb_ssvm, Lines: 8, Source: test_subgradient_svm.py

Example 4: test_binary_blocks

def test_binary_blocks():
    #testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, C=100, learning_rate=1, decay_exponent=1,
                          momentum=0, decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: DerThorsten, Project: pystruct, Lines: 9, Source: test_subgradient_svm.py

Example 5: test_multinomial_checker_subgradient

def test_multinomial_checker_subgradient():
    X, Y = toy.generate_checker_multinomial(n_samples=10, noise=0.0)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10,
                          momentum=.98, learning_rate=0.01)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: aurora1625, Project: pystruct, Lines: 9, Source: test_multinomial_grid.py

Example 6: test_multinomial_blocks_subgradient_offline

def test_multinomial_blocks_subgradient_offline():
    #testing subgradient ssvm in batch (offline) mode on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=100, online=False)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: martinsch, Project: coulomb_ssvm, Lines: 9, Source: test_subgradient_svm.py

Example 7: test_binary_checker_subgradient

def test_binary_checker_subgradient():
    #testing subgradient ssvm on non-submodular binary dataset
    X, Y = toy.generate_checker(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=100, C=100, momentum=.9,
                          learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: abhijitbendale, Project: pystruct, Lines: 9, Source: test_binary_grid.py

Example 8: test_multinomial_blocks_subgradient

def test_multinomial_blocks_subgradient():
    #testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10, momentum=.98,
                          learning_rate=0.001)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Developer: DerThorsten, Project: pystruct, Lines: 10, Source: test_subgradient_svm.py

Example 9: test_blobs_2d_subgradient

def test_blobs_2d_subgradient():
    # make two gaussian blobs
    X, Y = make_blobs(n_samples=80, centers=3, random_state=42)
    # we have to add a constant 1 feature by hand :-/
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    X_train, X_test, Y_train, Y_test = X[:40], X[40:], Y[:40], Y[40:]

    pbl = MultiClassClf(n_features=3, n_classes=3)
    svm = SubgradientSSVM(pbl, C=1000)

    svm.fit(X_train, Y_train)
    assert_array_equal(Y_test, np.hstack(svm.predict(X_test)))
Developer: DerThorsten, Project: pystruct, Lines: 12, Source: test_crammer_singer_svm.py

Example 10: test_subgradient_svm_as_crf_pickling

def test_subgradient_svm_as_crf_pickling():

    iris = load_iris()
    X, y = iris.data, iris.target

    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=int)) for x in X]  # int instead of the removed np.int alias
    Y = y.reshape(-1, 1)

    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='unary')
    logger = SaveLogger(file_name)
    svm = SubgradientSSVM(pbl, logger=logger, max_iter=100)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))
Developer: martinsch, Project: coulomb_ssvm, Lines: 18, Source: test_subgradient_svm.py

Example 11: test_objective

def test_objective():
    # test that LatentSubgradientSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = toy.generate_blocks_multinomial(n_samples=10)
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1)
    clfl = LatentSubgradientSSVM(model=crfl, max_iter=50, C=10.,
                                 learning_rate=0.001, momentum=0.98,
                                 decay_exponent=0)
    clfl.w = np.zeros(crfl.size_psi)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10.,
                          learning_rate=0.001, momentum=0.98, decay_exponent=0)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_array_equal(clf.predict(X), Y)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
Developer: aurora1625, Project: pystruct, Lines: 19, Source: test_subgradient_latent_svm.py

Example 12: test_binary_ssvm_attractive_potentials_edgefeaturegraph

def test_binary_ssvm_attractive_potentials_edgefeaturegraph(inference_method="qpbo"):
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF(inference_method=inference_method)

    #######

    # convert X,Y to EdgeFeatureGraphCRF instances
    crf_edge = EdgeFeatureGraphCRF(inference_method=inference_method,
                                   symmetric_edge_features=[0])
    X_edge = []
    Y_edge = []
    for i in range(X.shape[0]):
        unaries = X[i].reshape((-1, 2))
        edges = crf._get_edges(X[i])
        edge_feats = np.ones((edges.shape[0], 1))
        X_edge.append((unaries, edges, edge_feats))
        Y_edge.append((Y[i].reshape((-1,))))

    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                verbose=1,
                                zero_constraint=[4,7],
                                negativity_constraint=[5,6],
                                )

    # fit the model with a negativity constraint on the off-diagonal potentials
    submodular_clf_edge.fit(X_edge, Y_edge)

    assert submodular_clf_edge.w[5] == submodular_clf_edge.w[6] # symmetry constraint on edge features

    # # # bias doesn't matter
    # submodular_clf_edge.w += 10*np.ones(submodular_clf_edge.w.shape)
    # print len(submodular_clf_edge.w), submodular_clf_edge.w

    Y_pred = submodular_clf_edge.predict(X_edge)
    assert_array_equal(Y_edge, Y_pred)

    # fit the model again with the negativity constraint on the off-diagonal potentials,
    # this time with inverted sign on the edge features
    X_edge_neg = [ (x[0], x[1], -x[2]) for x in X_edge ]
    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                verbose=1,
                                zero_constraint=[4,7],
                                negativity_constraint=[5,6],
                                )
    submodular_clf_edge.fit(X_edge_neg, Y_edge)
    Y_pred = submodular_clf_edge.predict(X_edge_neg)

    assert_array_equal(Y_edge, Y_pred)
Developer: martinsch, Project: coulomb_ssvm, Lines: 49, Source: test_subgradient_svm.py

Example 13: test_objective

def test_objective():
    # test that SubgradientLatentSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = generate_blocks_multinomial(n_samples=10, noise=.3, seed=1)
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1,
                         inference_method=inference_method)
    clfl = SubgradientLatentSSVM(model=crfl, max_iter=20, C=10.,
                                 learning_rate=0.001, momentum=0.98)
    crfl.initialize(X, Y)
    clfl.w = np.zeros(crfl.size_joint_feature)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=20, C=10., learning_rate=0.001,
                          momentum=0.98)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
    assert_array_equal(clf.predict(X), clfl.predict(X))
    assert_array_equal(clf.predict(X), Y)
Developer: UIKit0, Project: pystruct, Lines: 22, Source: test_subgradient_latent_svm.py

Example 14: import

import numpy as np  # needed for np.unique below
import matplotlib.pyplot as plt

from pystruct.models import GridCRF
from pystruct.learners import (NSlackSSVM, OneSlackSSVM, SubgradientSSVM,
                               FrankWolfeSSVM)
from pystruct.datasets import generate_crosses_explicit

X, Y = generate_crosses_explicit(n_samples=50, noise=10, size=6, n_crosses=1)
n_labels = len(np.unique(Y))
crf = GridCRF(n_states=n_labels, inference_method=("ad3", {'branch_and_bound': True}))

n_slack_svm = NSlackSSVM(crf, check_constraints=False,
                         max_iter=50, batch_size=1, tol=0.001)
one_slack_svm = OneSlackSSVM(crf, check_constraints=False,
                             max_iter=100, tol=0.001, inference_cache=50)
subgradient_svm = SubgradientSSVM(crf, learning_rate=0.001, max_iter=20,
                                  decay_exponent=0, momentum=0)
bcfw_svm = FrankWolfeSSVM(crf, max_iter=50, check_dual_every=4)

# n-slack cutting plane ssvm
n_slack_svm.fit(X, Y)

# 1-slack cutting plane ssvm
one_slack_svm.fit(X, Y)

# online subgradient ssvm
subgradient_svm.fit(X, Y)

# Block coordinate Frank-Wolfe
bcfw_svm.fit(X, Y)

# don't plot objective from cached inference for 1-slack
Developer: DATAQC, Project: pystruct, Lines: 32, Source: plot_ssvm_objective_curves.py
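The excerpt above stops before the plotting code of the original script. Purely as an illustration (not the original source), the curves recorded during training could be drawn from the objective_curve_ attribute that Examples 11 and 13 also inspect:

import matplotlib.pyplot as plt

# plot the objective recorded at each iteration for every learner fitted above;
# objective_curve_ is the same attribute used in Examples 11 and 13
for name, ssvm in [("n-slack", n_slack_svm), ("one-slack", one_slack_svm),
                   ("subgradient", subgradient_svm), ("BCFW", bcfw_svm)]:
    plt.plot(ssvm.objective_curve_, label=name)
plt.xlabel("iteration")
plt.ylabel("objective")
plt.legend()
plt.show()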

Example 15: load_digits

import numpy as np
from time import time

from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split  # sklearn.cross_validation on older scikit-learn

from pystruct.models import MultiClassClf
from pystruct.learners import (NSlackSSVM, OneSlackSSVM, SubgradientSSVM,
                               FrankWolfeSSVM)

digits = load_digits()
X, y = digits.data, digits.target
#X = X / 255.
X = X / 16.
#y = y.astype(np.int) - 1
X_train, X_test, y_train, y_test = train_test_split(X, y)

# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
X_test_bias = np.hstack([X_test, np.ones((X_test.shape[0], 1))])

model = MultiClassClf(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = NSlackSSVM(model, verbose=2, check_constraints=False, C=0.1,
                         batch_size=100, tol=1e-2)
one_slack_svm = OneSlackSSVM(model, verbose=2, C=.10, tol=.001)
subgradient_svm = SubgradientSSVM(model, C=0.1, learning_rate=0.000001,
                                  max_iter=1000, verbose=0)

fw_bc_svm = FrankWolfeSSVM(model, C=.1, max_iter=50)
fw_batch_svm = FrankWolfeSSVM(model, C=.1, max_iter=50, batch_mode=True)

# n-slack cutting plane ssvm
start = time()
n_slack_svm.fit(X_train_bias, y_train)
time_n_slack_svm = time() - start
y_pred = np.hstack(n_slack_svm.predict(X_test_bias))
print("Score with pystruct n-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_n_slack_svm))

## 1-slack cutting plane ssvm
start = time()
one_slack_svm.fit(X_train_bias, y_train)
Developer: DATAQC, Project: pystruct, Lines: 32, Source: multi_class_svm.py
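Example 15 is cut off right after fitting the 1-slack SSVM. Purely as an illustration of the pattern (not the original source), the remaining learners would be timed and scored the same way as the n-slack block above:

# illustrative continuation, mirroring the n-slack timing/scoring block above
time_one_slack_svm = time() - start
y_pred = np.hstack(one_slack_svm.predict(X_test_bias))
print("Score with pystruct 1-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_one_slack_svm))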


Note: The pystruct.learners.SubgradientSSVM class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors; the source code copyright remains with the original authors, and distribution and use must follow each project's License. Do not reproduce without permission.