This article collects typical usage examples of the Python method pystruct.learners.SubgradientSSVM.fit. If you have been wondering what exactly SubgradientSSVM.fit does, how to call it, or where to find working usage examples, the hand-picked code samples below should help. You can also read further about the containing class, pystruct.learners.SubgradientSSVM.
Below are 15 code examples of SubgradientSSVM.fit, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
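Before the collected examples, here is a minimal sketch of the typical call pattern: build a model, wrap it in SubgradientSSVM, call fit, then predict. The toy dataset, the default GridCRF, and the hyperparameters below are illustrative assumptions chosen for brevity (pystruct plus an inference backend such as pyqpbo or AD3 is assumed to be installed); they are not taken from the examples that follow.

import numpy as np
from pystruct.datasets import generate_blocks
from pystruct.models import GridCRF
from pystruct.learners import SubgradientSSVM

# toy binary segmentation data: X has shape (n_samples, height, width, 2),
# Y holds the matching integer label grids
X, Y = generate_blocks(n_samples=5)

crf = GridCRF()  # pairwise CRF on a grid; sizes are inferred during fit
clf = SubgradientSSVM(model=crf, max_iter=100, C=10)  # illustrative hyperparameters
clf.fit(X, Y)  # stochastic subgradient descent on the structured hinge loss
Y_pred = clf.predict(X)  # list of label grids, one per sample

# fraction of perfectly labeled samples
print(np.mean([np.all(y == y_hat) for y, y_hat in zip(Y, Y_pred)]))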
Example 1: test_multinomial_checker_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_multinomial_checker_subgradient():
    X, Y = generate_checker_multinomial(n_samples=10, noise=0.4)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 2: test_binary_blocks_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_binary_blocks_subgradient():
    # testing subgradient ssvm on easy binary dataset
    X, Y = toy.generate_blocks(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=200, C=100, learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 3: test_binary_blocks
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_binary_blocks():
    # testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 4: test_binary_blocks
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_binary_blocks():
    # testing subgradient ssvm on easy binary dataset
    X, Y = generate_blocks(n_samples=5)
    crf = GridCRF(inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, C=100, learning_rate=1, decay_exponent=1,
                          momentum=0, decay_t0=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 5: test_multinomial_checker_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_multinomial_checker_subgradient():
    X, Y = toy.generate_checker_multinomial(n_samples=10, noise=0.0)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10,
                          momentum=.98, learning_rate=0.01)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 6: test_multinomial_blocks_subgradient_offline
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_multinomial_blocks_subgradient_offline():
    # testing subgradient ssvm (offline/batch updates) on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=100, online=False)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 7: test_binary_checker_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_binary_checker_subgradient():
    # testing subgradient ssvm on non-submodular binary dataset
    X, Y = toy.generate_checker(n_samples=10)
    crf = GridCRF()
    clf = SubgradientSSVM(model=crf, max_iter=100, C=100, momentum=.9,
                          learning_rate=0.1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 8: test_multinomial_blocks_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_multinomial_blocks_subgradient():
    # testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
    n_labels = len(np.unique(Y))
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10, momentum=.98,
                          learning_rate=0.001)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 9: test_blobs_2d_subgradient
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_blobs_2d_subgradient():
    # make three gaussian blobs
    X, Y = make_blobs(n_samples=80, centers=3, random_state=42)
    # we have to add a constant 1 feature by hand :-/
    X = np.hstack([X, np.ones((X.shape[0], 1))])
    X_train, X_test, Y_train, Y_test = X[:40], X[40:], Y[:40], Y[40:]
    pbl = MultiClassClf(n_features=3, n_classes=3)
    svm = SubgradientSSVM(pbl, C=1000)
    svm.fit(X_train, Y_train)
    assert_array_equal(Y_test, np.hstack(svm.predict(X_test)))
Example 10: test_binary_ssvm_attractive_potentials_edgefeaturegraph
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_binary_ssvm_attractive_potentials_edgefeaturegraph(inference_method="qpbo"):
    X, Y = generate_blocks(n_samples=10)
    crf = GridCRF(inference_method=inference_method)

    #######
    # convert X, Y to EdgeFeatureGraphCRF instances
    crf_edge = EdgeFeatureGraphCRF(inference_method=inference_method,
                                   symmetric_edge_features=[0])
    X_edge = []
    Y_edge = []
    for i in range(X.shape[0]):
        unaries = X[i].reshape((-1, 2))
        edges = crf._get_edges(X[i])
        edge_feats = np.ones((edges.shape[0], 1))
        X_edge.append((unaries, edges, edge_feats))
        Y_edge.append(Y[i].reshape((-1,)))

    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                          verbose=1,
                                          zero_constraint=[4, 7],
                                          negativity_constraint=[5, 6])
    # fit the model with non-negativity constraint on the off-diagonal potential
    submodular_clf_edge.fit(X_edge, Y_edge)
    assert submodular_clf_edge.w[5] == submodular_clf_edge.w[6]  # symmetry constraint on edge features
    # # # bias doesn't matter
    # submodular_clf_edge.w += 10*np.ones(submodular_clf_edge.w.shape)
    # print len(submodular_clf_edge.w), submodular_clf_edge.w
    Y_pred = submodular_clf_edge.predict(X_edge)
    assert_array_equal(Y_edge, Y_pred)

    # try to fit the model with non-negativity constraint on the off-diagonal
    # potential, this time with inverted sign on the edge features
    X_edge_neg = [(x[0], x[1], -x[2]) for x in X_edge]
    submodular_clf_edge = SubgradientSSVM(model=crf_edge, max_iter=100, C=1,
                                          verbose=1,
                                          zero_constraint=[4, 7],
                                          negativity_constraint=[5, 6])
    submodular_clf_edge.fit(X_edge_neg, Y_edge)
    Y_pred = submodular_clf_edge.predict(X_edge_neg)
    assert_array_equal(Y_edge, Y_pred)
Example 11: test_subgradient_svm_as_crf_pickling
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_subgradient_svm_as_crf_pickling():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_ = [(np.atleast_2d(x), np.empty((0, 2), dtype=np.int)) for x in X]
    Y = y.reshape(-1, 1)
    X_train, X_test, y_train, y_test = train_test_split(X_, Y, random_state=1)
    _, file_name = mkstemp()

    pbl = GraphCRF(n_features=4, n_states=3, inference_method='unary')
    logger = SaveLogger(file_name)
    svm = SubgradientSSVM(pbl, logger=logger, max_iter=100)
    svm.fit(X_train, y_train)

    assert_less(.97, svm.score(X_test, y_test))
    assert_less(.97, logger.load().score(X_test, y_test))
Example 12: test_objective
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_objective():
    # test that LatentSubgradientSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = toy.generate_blocks_multinomial(n_samples=10)
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1)
    clfl = LatentSubgradientSSVM(model=crfl, max_iter=50, C=10.,
                                 learning_rate=0.001, momentum=0.98,
                                 decay_exponent=0)
    clfl.w = np.zeros(crfl.size_psi)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10.,
                          learning_rate=0.001, momentum=0.98, decay_exponent=0)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_array_equal(clf.predict(X), Y)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
Example 13: test_objective
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
def test_objective():
    # test that SubgradientLatentSSVM does the same as SubgradientSSVM,
    # in particular that it has the same loss, if there are no latent states.
    X, Y = generate_blocks_multinomial(n_samples=10, noise=.3, seed=1)
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    n_labels = 3
    crfl = LatentGridCRF(n_labels=n_labels, n_states_per_label=1,
                         inference_method=inference_method)
    clfl = SubgradientLatentSSVM(model=crfl, max_iter=20, C=10.,
                                 learning_rate=0.001, momentum=0.98)
    crfl.initialize(X, Y)
    clfl.w = np.zeros(crfl.size_joint_feature)  # this disables random init
    clfl.fit(X, Y)

    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=20, C=10., learning_rate=0.001,
                          momentum=0.98)
    clf.fit(X, Y)
    assert_array_almost_equal(clf.w, clfl.w)
    assert_almost_equal(clf.objective_curve_[-1], clfl.objective_curve_[-1])
    assert_array_equal(clf.predict(X), clfl.predict(X))
    assert_array_equal(clf.predict(X), Y)
Example 14: NSlackSSVM
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
n_slack_svm = NSlackSSVM(crf, check_constraints=False,
                         max_iter=50, batch_size=1, tol=0.001)
one_slack_svm = OneSlackSSVM(crf, check_constraints=False,
                             max_iter=100, tol=0.001, inference_cache=50)
subgradient_svm = SubgradientSSVM(crf, learning_rate=0.001, max_iter=20,
                                  decay_exponent=0, momentum=0)
bcfw_svm = FrankWolfeSSVM(crf, max_iter=50, check_dual_every=4)

# n-slack cutting plane ssvm
n_slack_svm.fit(X, Y)

# 1-slack cutting plane ssvm
one_slack_svm.fit(X, Y)

# online subgradient ssvm
subgradient_svm.fit(X, Y)

# Block coordinate Frank-Wolfe
bcfw_svm.fit(X, Y)

# don't plot objective from cached inference for 1-slack
inference_run = ~np.array(one_slack_svm.cached_constraint_)
time_one = np.array(one_slack_svm.timestamps_[1:])[inference_run]

# plot stuff
plt.plot(n_slack_svm.timestamps_[1:], n_slack_svm.objective_curve_,
         label="n-slack cutting plane")
plt.plot(n_slack_svm.timestamps_[1:], n_slack_svm.primal_objective_curve_,
         label="n-slack primal")
plt.plot(time_one,
         np.array(one_slack_svm.objective_curve_)[inference_run],
         label="one-slack cutting plane")
Example 15: time
# Required import: from pystruct.learners import SubgradientSSVM [as alias]
# Or: from pystruct.learners.SubgradientSSVM import fit [as alias]
time_n_slack_svm = time() - start
y_pred = np.hstack(n_slack_svm.predict(X_test_bias))
print("Score with pystruct n-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_n_slack_svm))

# 1-slack cutting plane ssvm
start = time()
one_slack_svm.fit(X_train_bias, y_train)
time_one_slack_svm = time() - start
y_pred = np.hstack(one_slack_svm.predict(X_test_bias))
print("Score with pystruct 1-slack ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_one_slack_svm))

# online subgradient ssvm
start = time()
subgradient_svm.fit(X_train_bias, y_train)
time_subgradient_svm = time() - start
y_pred = np.hstack(subgradient_svm.predict(X_test_bias))
print("Score with pystruct subgradient ssvm: %f (took %f seconds)"
      % (np.mean(y_pred == y_test), time_subgradient_svm))

# the standard one-vs-rest multi-class would probably be as good and faster
# but solving a different model
libsvm = LinearSVC(multi_class='crammer_singer', C=.1)
start = time()
libsvm.fit(X_train, y_train)
time_libsvm = time() - start
print("Score with sklearn and libsvm: %f (took %f seconds)"
      % (libsvm.score(X_test, y_test), time_libsvm))