This article collects typical usage examples of the CDClassifier.decision_function method from Python's lightning.primal_cd module. If you have been wondering how to use Python's CDClassifier.decision_function in practice, or are looking for concrete examples of it, the selected code samples below may help. You can also explore further usage examples of the containing class, lightning.primal_cd.CDClassifier.
The following shows 3 code examples of the CDClassifier.decision_function method, sorted by popularity by default.
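Before the examples, here is a minimal usage sketch. It only assumes the scikit-learn-style estimator interface (fit / predict / decision_function) that the tests below exercise; the make_classification dataset and all parameter values are illustrative choices, not part of the collected examples.

from sklearn.datasets import make_classification
from lightning.primal_cd import CDClassifier

# Illustrative synthetic data; any (X, y) with integer class labels works.
X, y = make_classification(n_samples=200, n_features=50, n_informative=10,
                           n_classes=3, random_state=0)

clf = CDClassifier(penalty="l1/l2", loss="squared_hinge", multiclass=False,
                   max_iter=20, C=1.0, random_state=0)
clf.fit(X, y)

# One score column per class (one-vs-rest here); the highest-scoring
# column corresponds to the predicted class.
scores = clf.decision_function(X)
print(scores.shape)        # (n_samples, n_classes)
print(clf.predict(X)[:5])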
Example 1: test_l1l2_multi_task_squared_hinge_loss
# Required import: from lightning.primal_cd import CDClassifier
# (numpy as np, sklearn.preprocessing.LabelBinarizer, the assert_* helpers and
# the mult_dense / mult_target fixtures come from the surrounding test module)
def test_l1l2_multi_task_squared_hinge_loss():
    # Binarize the multiclass targets to a {-1, +1} indicator matrix.
    Y = LabelBinarizer(neg_label=-1).fit_transform(mult_target)

    clf = CDClassifier(penalty="l1/l2", loss="squared_hinge",
                       multiclass=False,
                       max_iter=20, C=5.0, random_state=0)
    clf.fit(mult_dense, mult_target)
    df = clf.decision_function(mult_dense)
    # For the squared hinge loss, errors_ stores 1 - y * f(x).
    assert_array_almost_equal(clf.errors_.T, 1 - Y * df)
    assert_almost_equal(clf.score(mult_dense, mult_target), 0.8633, 3)
    nz = np.sum(clf.coef_ != 0)
    assert_equal(nz, 300)

    # A smaller C yields a sparser model.
    clf = CDClassifier(penalty="l1/l2", loss="squared_hinge",
                       multiclass=False,
                       max_iter=20, C=0.05, random_state=0)
    clf.fit(mult_dense, mult_target)
    assert_almost_equal(clf.score(mult_dense, mult_target), 0.8266, 3)
    nz = np.sum(clf.coef_ != 0)
    assert_equal(nz, 231)
Example 2: test_l1l2_multiclass_log_loss
# Required import: from lightning.primal_cd import CDClassifier
# (numpy as np, the assert_* helpers and the mult_dense / mult_csc /
# mult_target fixtures come from the surrounding test module)
def test_l1l2_multiclass_log_loss():
    for data in (mult_dense, mult_csc):
        clf = CDClassifier(penalty="l1/l2", loss="log", multiclass=True,
                           max_steps=30, max_iter=5, C=1.0, random_state=0)
        clf.fit(data, mult_target)
        assert_almost_equal(clf.score(data, mult_target), 0.8766, 3)

        df = clf.decision_function(data)
        # Subtract each row's true-class score and exponentiate: for the
        # multiclass log loss, errors_ stores exp(f_k(x) - f_y(x)).
        sel = np.array([df[i, int(mult_target[i])] for i in range(df.shape[0])])
        df -= sel[:, np.newaxis]
        df = np.exp(df)
        assert_array_almost_equal(clf.errors_, df.T)
        for i in range(data.shape[0]):
            assert_almost_equal(clf.errors_[mult_target[i], i], 1.0)
        nz = np.sum(clf.coef_ != 0)
        assert_equal(nz, 297)

        # A smaller C yields a sparser model.
        clf = CDClassifier(penalty="l1/l2", loss="log", multiclass=True,
                           max_steps=30, max_iter=5, C=0.3, random_state=0)
        clf.fit(data, mult_target)
        assert_almost_equal(clf.score(data, mult_target), 0.8566, 3)
        nz = np.sum(clf.coef_ != 0)
        assert_equal(nz, 213)
        assert_true(nz % 3 == 0)  # should be a multiple of n_classes
Example 3: test_l1l2_multiclass_squared_hinge_loss_no_linesearch
# Required import: from lightning.primal_cd import CDClassifier
# (numpy as np, the assert_* helpers and the mult_csc / mult_target fixtures
# come from the surrounding test module)
def test_l1l2_multiclass_squared_hinge_loss_no_linesearch():
    data = mult_csc
    # max_steps=0 skips the line search (hence the test name).
    clf = CDClassifier(penalty="l1/l2", loss="squared_hinge",
                       multiclass=True, shrinking=False, selection="uniform",
                       max_steps=0, max_iter=200, C=1.0, random_state=0)
    clf.fit(data, mult_target)
    assert_almost_equal(clf.score(data, mult_target), 0.9166, 3)

    df = clf.decision_function(data)
    n_samples, n_vectors = df.shape
    # For the multiclass squared hinge loss, errors_ stores the margin
    # violations 1 - (f_y(x) - f_k(x)) for every class k.
    diff = np.zeros_like(clf.errors_)
    for i in range(n_samples):
        for k in range(n_vectors):
            diff[k, i] = 1 - (df[i, mult_target[i]] - df[i, k])
    assert_array_almost_equal(clf.errors_, diff)
    assert_equal(np.sum(clf.coef_ != 0), 300)

    # A smaller C yields a sparser model.
    clf = CDClassifier(penalty="l1/l2", loss="squared_hinge",
                       multiclass=True,
                       max_iter=20, C=0.05, random_state=0)
    clf.fit(data, mult_target)
    assert_almost_equal(clf.score(data, mult_target), 0.83, 3)
    nz = np.sum(clf.coef_ != 0)
    assert_equal(nz, 207)
    assert_true(nz % 3 == 0)  # should be a multiple of n_classes