This article collects typical usage examples of the Python method mlxtend.classifier.Adaline.fit. If you are wondering what Adaline.fit does or how to use it, the selected code examples below may help; you can also look further into the usage of the containing class, mlxtend.classifier.Adaline.
The following shows 15 code examples of Adaline.fit, ordered by popularity.
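Note that the examples are test functions collected from different versions of mlxtend's test suite, which is why the constructor arguments vary (learning='gd'/'sgd' and solver='gd' in older releases, minibatches= in newer ones) and why some test names appear twice. They also rely on module-level fixtures (X, X_std, y, y0, y1, y2) that are defined in the test module but not reproduced on this page. Below is a minimal sketch of how such fixtures could be set up from the Iris data so that the examples run; the variable names mirror the tests, but the exact feature columns and the construction of the invalid-label array y2 are assumptions, not code taken from mlxtend.

import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import Adaline

# Two features, first two Iris classes only (labels 0 and 1) -- assumed setup
X, y = iris_data()
X, y = X[0:100, [0, 3]], y[0:100]

# Standardize both feature columns
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

y0 = y                        # labels in {0, 1}
y1 = np.where(y == 0, -1, 1)  # labels in {-1, 1}
y2 = np.where(y == 0, -1, 2)  # deliberately invalid labels (hypothetical, for test_invalid_class)

# Basic Adaline.fit usage on the standardized data
ada = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)
ada.fit(X_std, y1)
print(ada.score(X_std, y1))   # should be close to 1.0 on this linearly separable subset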
Example 1: test_gradient_descent
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_gradient_descent():
    # batch gradient descent should recover the reference weights
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 2: test_0_1_class
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_0_1_class():
    # Adaline also accepts 0/1 class labels (y0) instead of -1/1
    t1 = np.array([0.51, -0.04, 0.51])
    ada = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    ada.fit(X_std, y0)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y0 == ada.predict(X_std)).all()
Example 3: test_refit_weights
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_refit_weights():
    # a second fit with init_weights=False continues from the learned weights
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=15, eta=0.01, solver='gd', random_seed=1)
    ada.fit(X_std, y1, init_weights=True)
    ada.fit(X_std, y1, init_weights=False)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 4: test_stochastic_gradient_descent
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_stochastic_gradient_descent():
    t1 = np.array([0.03, -0.09, 1.02])
    ada = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 5: test_print_progress_2
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_print_progress_2():
    # smoke test: fitting with progress output enabled (print_progress=2)
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  print_progress=2,
                  random_seed=1)
    ada.fit(X_std, y1)
Example 6: test_ary_persistency_in_shuffling
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_ary_persistency_in_shuffling():
    # fitting with shuffled minibatches must not modify the input array in place
    orig = X_std.copy()
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=len(y),
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(orig, X_std, 6)
Example 7: test_score_function
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_score_function():
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1)
    acc = ada.score(X_std, y1)
    assert acc == 1.0, acc
Example 8: test_invalid_class
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_invalid_class():
    ada = Adaline(epochs=40, eta=0.01, random_seed=1)
    try:
        ada.fit(X, y2)  # y2 contains class labels Adaline does not accept
        assert False, 'fit should have raised a ValueError'
    except ValueError:
        pass
Example 9: test_stochastic_gradient_descent
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_stochastic_gradient_descent():
    # minibatches=len(y) corresponds to stochastic gradient descent
    t1 = np.array([[-0.08], [1.02]])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=len(y),
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 10: test_normal_equation
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_normal_equation():
    # minibatches=None computes the weights in closed form (normal equation)
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=None,
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 11: test_score_function
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_score_function():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1)
    acc = ada.score(X_std, y1)
    assert acc == 1.0, acc
Example 12: test_standardized_iris_data_with_shuffle
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_standardized_iris_data_with_shuffle():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  solver='gd',
                  random_seed=1,
                  shuffle=True)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 13: test_refit_weights
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_refit_weights():
    # a second fit with init_params=False continues from the learned parameters
    t1 = np.array([[-0.08], [1.02]])
    ada = Adaline(epochs=15,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1, init_params=True)
    ada.fit(X_std, y1, init_params=False)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 14: test_standardized_iris_data_with_zero_weights
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_standardized_iris_data_with_zero_weights():
    # zero_init_weight=True starts from all-zero weights instead of small random values
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1,
                  zero_init_weight=True)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()
Example 15: test_normal_equation
# Module to import: from mlxtend.classifier import Adaline
# Or: from mlxtend.classifier.Adaline import fit
def test_normal_equation():
    # newer mlxtend versions store the bias separately in b_
    t1 = np.array([[-0.08], [1.02]])
    b1 = np.array([0.00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=None,
                  random_seed=None)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, decimal=2)
    np.testing.assert_almost_equal(ada.b_, b1, decimal=2)
    assert (y1 == ada.predict(X_std)).all(), ada.predict(X_std)