This page collects typical usage examples of the Python function pystruct.datasets.generate_blocks_multinomial. If you are unsure what exactly generate_blocks_multinomial does or how to use it, the curated code examples below may help.
Below are 15 code examples of generate_blocks_multinomial, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
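Before the examples, a minimal sketch of what the generator produces (the shape comments assume the default size_x=12). The snippets below additionally assume the usual pystruct test imports: numpy as np, the models from pystruct.models, the learners from pystruct.learners, make_grid_edges from pystruct.utils, get_installed from pystruct.inference, and the nose/numpy.testing assertion helpers. The edge_list_to_features helper reproduced here is defined locally in pystruct's own test suite and is included only so the examples are self-contained.

import numpy as np

from pystruct.datasets import generate_blocks_multinomial

# Three-class "blocks" grid data: X holds noisy per-class unary potentials,
# Y holds the integer ground-truth label of every grid cell.
X, Y = generate_blocks_multinomial(n_samples=2, noise=0.5, seed=0)
print(X.shape)  # (2, 12, 12, 3): samples x height x width x n_states
print(Y.shape)  # (2, 12, 12): labels in {0, 1, 2}

def edge_list_to_features(edge_list):
    # Helper used by several examples below: one indicator feature per
    # edge direction (horizontal vs. vertical grid edges).
    edges = np.vstack(edge_list)
    edge_features = np.zeros((edges.shape[0], 2))
    edge_features[:len(edge_list[0]), 0] = 1
    edge_features[len(edge_list[0]):, 1] = 1
    return edge_features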
Example 1: test_initialization
def test_initialization():
    X, Y = generate_blocks_multinomial(noise=2, n_samples=1, seed=1)
    x, y = X[0], Y[0]
    n_states = x.shape[-1]
    edge_list = make_grid_edges(x, 4, return_lists=True)
    edges = np.vstack(edge_list)
    edge_features = edge_list_to_features(edge_list)
    x = (x.reshape(-1, n_states), edges, edge_features)
    y = y.ravel()

    crf = EdgeFeatureGraphCRF()
    crf.initialize([x], [y])
    assert_equal(crf.n_edge_features, 2)
    assert_equal(crf.n_features, 3)
    assert_equal(crf.n_states, 3)

    crf = EdgeFeatureGraphCRF(n_states=3,
                              n_features=3,
                              n_edge_features=2)
    # no-op
    crf.initialize([x], [y])

    crf = EdgeFeatureGraphCRF(n_states=4,
                              n_edge_features=2)
    # incompatible
    assert_raises(ValueError, crf.initialize, X=[x], Y=[y])
Example 2: test_joint_feature_continuous
def test_joint_feature_continuous():
    # FIXME
    # first make perfect prediction, including pairwise part
    X, Y = generate_blocks_multinomial(noise=2, n_samples=1, seed=1)
    x, y = X[0], Y[0]
    n_states = x.shape[-1]

    pw_horz = -1 * np.eye(n_states)
    xx, yy = np.indices(pw_horz.shape)
    # linear ordering constraint horizontally
    pw_horz[xx > yy] = 1

    # high cost for unequal labels vertically
    pw_vert = -1 * np.eye(n_states)
    pw_vert[xx != yy] = 1
    pw_vert *= 10

    # create crf, assemble weight, make prediction
    for inference_method in get_installed(["lp", "ad3"]):
        crf = DirectionalGridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        w = np.hstack([np.eye(3).ravel(), -pw_horz.ravel(), -pw_vert.ravel()])
        y_pred = crf.inference(x, w, relaxed=True)

        # compute joint_feature for prediction
        joint_feature_y = crf.joint_feature(x, y_pred)
        assert_equal(joint_feature_y.shape, (crf.size_joint_feature,))
Example 3: test_multinomial_blocks_frankwolfe_batch
def test_multinomial_blocks_frankwolfe_batch():
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=0)
    crf = GridCRF(inference_method='qpbo')
    clf = FrankWolfeSSVM(model=crf, C=1, max_iter=500, batch_mode=True)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 4: test_edge_feature_latent_node_crf_no_latent
def test_edge_feature_latent_node_crf_no_latent():
    # no latent nodes
    # Test inference with different weights in different directions
    X, Y = generate_blocks_multinomial(noise=2, n_samples=1, seed=1,
                                       size_x=10)
    x, y = X[0], Y[0]
    n_states = x.shape[-1]
    edge_list = make_grid_edges(x, 4, return_lists=True)
    edges = np.vstack(edge_list)

    pw_horz = -1 * np.eye(n_states + 5)
    xx, yy = np.indices(pw_horz.shape)
    # linear ordering constraint horizontally
    pw_horz[xx > yy] = 1

    # high cost for unequal labels vertically
    pw_vert = -1 * np.eye(n_states + 5)
    pw_vert[xx != yy] = 1
    pw_vert *= 10

    # generate edge weights
    edge_weights_horizontal = np.repeat(pw_horz[np.newaxis, :, :],
                                        edge_list[0].shape[0], axis=0)
    edge_weights_vertical = np.repeat(pw_vert[np.newaxis, :, :],
                                      edge_list[1].shape[0], axis=0)
    edge_weights = np.vstack([edge_weights_horizontal, edge_weights_vertical])

    # do inference
    # pad x for hidden states...
    x_padded = -100 * np.ones((x.shape[0], x.shape[1], x.shape[2] + 5))
    x_padded[:, :, :x.shape[2]] = x
    res = lp_general_graph(-x_padded.reshape(-1, n_states + 5), edges,
                           edge_weights)

    edge_features = edge_list_to_features(edge_list)
    x = (x.reshape(-1, n_states), edges, edge_features, 0)
    y = y.ravel()

    for inference_method in get_installed(["lp"]):
        # same inference through CRF interface
        crf = EdgeFeatureLatentNodeCRF(n_labels=3,
                                       inference_method=inference_method,
                                       n_edge_features=2, n_hidden_states=5)
        w = np.hstack([np.eye(3).ravel(), -pw_horz.ravel(), -pw_vert.ravel()])
        y_pred = crf.inference(x, w, relaxed=True)
        assert_array_almost_equal(res[0],
                                  y_pred[0].reshape(-1, n_states + 5), 4)
        assert_array_almost_equal(res[1], y_pred[1], 4)
        assert_array_equal(y, np.argmax(y_pred[0], axis=-1))

    for inference_method in get_installed(["lp", "ad3", "qpbo"]):
        # again, this time discrete predictions only
        crf = EdgeFeatureLatentNodeCRF(n_labels=3,
                                       inference_method=inference_method,
                                       n_edge_features=2, n_hidden_states=5)
        w = np.hstack([np.eye(3).ravel(), -pw_horz.ravel(), -pw_vert.ravel()])
        y_pred = crf.inference(x, w, relaxed=False)
        assert_array_equal(y, y_pred)
Example 5: test_one_slack_constraint_caching
def test_one_slack_constraint_caching():
    # testing cutting plane ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.5, seed=0,
                                       size_x=9)
    n_labels = len(np.unique(Y))
    exact_inference = get_installed([('ad3', {'branch_and_bound': True}),
                                     "lp"])[0]
    crf = GridCRF(n_states=n_labels, inference_method=exact_inference)
    clf = OneSlackSSVM(model=crf, max_iter=150, C=1,
                       check_constraints=True, break_on_bad=True,
                       inference_cache=50, inactive_window=0)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
    assert_equal(len(clf.inference_cache_), len(X))
    # there should be 13 constraints, which is less than the 94 iterations
    # that are done
    # check that we didn't change the behavior of how we construct the cache
    constraints_per_sample = [len(cache) for cache in clf.inference_cache_]
    if exact_inference == "lp":
        assert_equal(len(clf.inference_cache_[0]), 18)
        assert_equal(np.max(constraints_per_sample), 18)
        assert_equal(np.min(constraints_per_sample), 18)
    else:
        assert_equal(len(clf.inference_cache_[0]), 13)
        assert_equal(np.max(constraints_per_sample), 20)
        assert_equal(np.min(constraints_per_sample), 11)
Example 6: test_psi_continuous
def test_psi_continuous():
    # FIXME
    # first make perfect prediction, including pairwise part
    X, Y = generate_blocks_multinomial(noise=2, n_samples=1, seed=1)
    x, y = X[0], Y[0]
    n_states = x.shape[-1]
    edge_list = make_grid_edges(x, 4, return_lists=True)
    edges = np.vstack(edge_list)
    edge_features = edge_list_to_features(edge_list)
    x = (x.reshape(-1, 3), edges, edge_features)
    y = y.ravel()

    pw_horz = -1 * np.eye(n_states)
    xx, yy = np.indices(pw_horz.shape)
    # linear ordering constraint horizontally
    pw_horz[xx > yy] = 1

    # high cost for unequal labels vertically
    pw_vert = -1 * np.eye(n_states)
    pw_vert[xx != yy] = 1
    pw_vert *= 10

    # create crf, assemble weight, make prediction
    for inference_method in get_installed(["lp", "ad3"]):
        crf = EdgeFeatureGraphCRF(inference_method=inference_method)
        w = np.hstack([np.eye(3).ravel(), -pw_horz.ravel(), -pw_vert.ravel()])
        crf.initialize([x], [y])
        y_pred = crf.inference(x, w, relaxed=True)

        # compute psi for prediction (psi is the older pystruct name for
        # what later versions call joint_feature)
        psi_y = crf.psi(x, y_pred)
        assert_equal(psi_y.shape, (crf.size_psi,))
Example 7: test_joint_feature_discrete
def test_joint_feature_discrete():
    """
    Testing with a single type of nodes. Must do as well as
    EdgeFeatureGraphCRF.
    """
    X, Y = generate_blocks_multinomial(noise=2, n_samples=1, seed=1)
    x, y = X[0], Y[0]
    edge_list = make_grid_edges(x, 4, return_lists=True)
    edges = np.vstack(edge_list)
    edge_features = edge_list_to_features(edge_list)
    x = ([x.reshape(-1, 3)], [edges], [edge_features])
    y_flat = y.ravel()
    #for inference_method in get_installed(["lp", "ad3", "qpbo"]):
    if True:
        crf = NodeTypeEdgeFeatureGraphCRF(1, [3], [3], [[2]])
        joint_feature_y = crf.joint_feature(x, y_flat)
        assert_equal(joint_feature_y.shape, (crf.size_joint_feature,))
        # first horizontal, then vertical
        # we trust the unaries ;)
        n_states = crf.l_n_states[0]
        n_features = crf.l_n_features[0]
        pw_joint_feature_horz, pw_joint_feature_vert = joint_feature_y[
            n_states * n_features:].reshape(2, n_states, n_states)
        assert_array_equal(pw_joint_feature_vert,
                           np.diag([9 * 4, 9 * 4, 9 * 4]))
        vert_joint_feature = np.diag([10 * 3, 10 * 3, 10 * 3])
        vert_joint_feature[0, 1] = 10
        vert_joint_feature[1, 2] = 10
        assert_array_equal(pw_joint_feature_horz, vert_joint_feature)
Example 8: test_switch_to_ad3
def test_switch_to_ad3():
    # test if switching between qpbo and ad3 works
    if not get_installed(['qpbo']) or not get_installed(['ad3']):
        return
    X, Y = generate_blocks_multinomial(n_samples=5, noise=1.5, seed=0)
    crf = GridCRF(n_states=3, inference_method='qpbo')

    ssvm = NSlackSSVM(crf, max_iter=10000)
    ssvm_with_switch = NSlackSSVM(crf, max_iter=10000, switch_to=('ad3'))
    ssvm.fit(X, Y)
    ssvm_with_switch.fit(X, Y)
    assert_equal(ssvm_with_switch.model.inference_method, 'ad3')
    # we check that the dual is higher with ad3 inference,
    # as it might use the relaxation; that is pretty much guaranteed
    assert_greater(ssvm_with_switch.objective_curve_[-1],
                   ssvm.objective_curve_[-1])
    print(ssvm_with_switch.objective_curve_[-1], ssvm.objective_curve_[-1])

    # test that convergence also results in switch
    ssvm_with_switch = NSlackSSVM(crf, max_iter=10000, switch_to=('ad3'),
                                  tol=10)
    ssvm_with_switch.fit(X, Y)
    assert_equal(ssvm_with_switch.model.inference_method, 'ad3')
Example 9: test_multinomial_blocks
def test_multinomial_blocks():
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=0)
    crf = GridCRF(n_states=X.shape[-1])
    clf = StructuredPerceptron(model=crf, max_iter=10)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 10: test_max_product_multinomial_crf
def test_max_product_multinomial_crf():
    X, Y = generate_blocks_multinomial(n_samples=1)
    x, y = X[0], Y[0]
    w = np.array([1.0, 0.0, 0.0,
                  0.0, 1.0, 0.0,
                  0.0, 0.0, 1.0,    # unary
                  0.4,
                  -0.3, 0.3,
                  -0.5, -0.1, 0.3])  # pairwise
    crf = GridCRF(inference_method="max-product")
    crf.initialize(X, Y)
    y_hat = crf.inference(x, w)
    assert_array_equal(y, y_hat)
Example 11: test_multinomial_blocks_subgradient_offline
def test_multinomial_blocks_subgradient_offline():
    # testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.6, seed=1)
    n_labels = len(np.unique(Y))
    # in the original test module, inference_method is picked once at
    # import time, e.g.:
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=100, online=False)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 12: test_multinomial_blocks_frankwolfe
def test_multinomial_blocks_frankwolfe():
    X, Y = generate_blocks_multinomial(n_samples=50, noise=0.5, seed=0)
    crf = GridCRF(inference_method='qpbo')
    clf = FrankWolfeSSVM(model=crf, C=1, line_search=True,
                         batch_mode=False, check_dual_every=500)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 13: test_blocks_multinomial_crf
def test_blocks_multinomial_crf():
    X, Y = generate_blocks_multinomial(n_samples=1, size_x=9, seed=0)
    x, y = X[0], Y[0]
    w = np.array([1.0, 0.0, 0.0,
                  0.0, 1.0, 0.0,
                  0.0, 0.0, 1.0,    # unary
                  0.4,
                  -0.3, 0.3,
                  -0.5, -0.1, 0.3])  # pairwise
    for inference_method in get_installed():
        crf = GridCRF(inference_method=inference_method)
        crf.initialize(X, Y)
        y_hat = crf.inference(x, w)
        assert_array_equal(y, y_hat)
Example 14: test_multinomial_blocks_cutting_plane
def test_multinomial_blocks_cutting_plane():
    # testing cutting plane ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=40, noise=0.5, seed=0)
    n_labels = len(np.unique(Y))
    # in the original test module, inference_method is picked once at
    # import time, e.g.:
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = NSlackSSVM(model=crf, max_iter=100, C=100, check_constraints=False,
                     batch_size=1)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
Example 15: test_multinomial_blocks_subgradient
def test_multinomial_blocks_subgradient():
    # testing subgradient ssvm on easy multinomial dataset
    X, Y = generate_blocks_multinomial(n_samples=10, noise=0.3, seed=1)
    n_labels = len(np.unique(Y))
    # in the original test module, inference_method is picked once at
    # import time, e.g.:
    inference_method = get_installed(["qpbo", "ad3", "lp"])[0]
    crf = GridCRF(n_states=n_labels, inference_method=inference_method)
    clf = SubgradientSSVM(model=crf, max_iter=50, C=10, momentum=.98,
                          learning_rate=0.001)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)