This article collects typical usage examples of sklearn.linear_model.Perceptron in Python. If you are wondering how linear_model.Perceptron is used in practice, or are looking for concrete examples, the curated code samples here may help. You can also explore further usage examples from the module it belongs to, sklearn.linear_model.
The following shows 15 code examples of linear_model.Perceptron, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
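For orientation before the collected examples, here is a minimal, self-contained sketch of training and scoring a Perceptron; the dataset and parameter values are illustrative and are not taken from the examples below.
from sklearn.datasets import load_iris
from sklearn.linear_model import Perceptron
from sklearn.model_selection import train_test_split

# Illustrative data split; any classification dataset works the same way.
X_train, X_test, y_train, y_test = train_test_split(
    *load_iris(return_X_y=True), random_state=0)
clf = Perceptron(max_iter=1000, tol=1e-3, random_state=0)
clf.fit(X_train, y_train)
print(clf.score(X_test, y_test))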
Example 1: fit
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def fit(self, X, y, classes=None, sample_weight=None):
    """ Calls the Perceptron fit function from sklearn.

    Parameters
    ----------
    X: numpy.ndarray of shape (n_samples, n_features)
        The feature matrix.

    y: Array-like
        The class labels for all samples in X.

    classes: Not used.

    sample_weight:
        Sample weights. If not provided, uniform weights are assumed.

    Returns
    -------
    PerceptronMask
        self
    """
    self.classifier.fit(X=X, y=y, sample_weight=sample_weight)
    return self
Example 2: test_classification
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_classification():
    # Check classification for various parameter settings.
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(iris.data,
                                                        iris.target,
                                                        random_state=rng)
    grid = ParameterGrid({"max_samples": [0.5, 1.0],
                          "max_features": [1, 2, 4],
                          "bootstrap": [True, False],
                          "bootstrap_features": [True, False]})

    for base_estimator in [None,
                           DummyClassifier(),
                           Perceptron(tol=1e-3),
                           DecisionTreeClassifier(),
                           KNeighborsClassifier(),
                           SVC(gamma="scale")]:
        for params in grid:
            BaggingClassifier(base_estimator=base_estimator,
                              random_state=rng,
                              **params).fit(X_train, y_train).predict(X_test)
Example 3: test_gridsearch_pipeline_precomputed
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="precomputed", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca),
                         ("Perceptron", Perceptron(max_iter=5))])
    param_grid = dict(Perceptron__max_iter=np.arange(1, 5))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    X_kernel = rbf_kernel(X, gamma=2.)
    grid_search.fit(X_kernel, y)
    assert_equal(grid_search.best_score_, 1)
Example 4: test_model_perceptron_binary_class
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_model_perceptron_binary_class(self):
    model, X = fit_classification_model(
        Perceptron(random_state=42), 2)
    model_onnx = convert_sklearn(
        model,
        "scikit-learn Perceptron binary classifier",
        [("input", FloatTensorType([None, X.shape[1]]))],
        target_opset=TARGET_OPSET
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X.astype(np.float32),
        model,
        model_onnx,
        basename="SklearnPerceptronClassifierBinary-Out0",
        allow_failure="StrictVersion(onnx.__version__)"
                      " < StrictVersion('1.2') or "
                      "StrictVersion(onnxruntime.__version__)"
                      " <= StrictVersion('0.2.1')",
    )
Example 5: test_model_perceptron_multi_class
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_model_perceptron_multi_class(self):
    model, X = fit_classification_model(
        Perceptron(random_state=42), 5)
    model_onnx = convert_sklearn(
        model,
        "scikit-learn Perceptron multi-class classifier",
        [("input", FloatTensorType([None, X.shape[1]]))],
        target_opset=TARGET_OPSET
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X.astype(np.float32),
        model,
        model_onnx,
        basename="SklearnPerceptronClassifierMulti-Out0",
        allow_failure="StrictVersion(onnx.__version__)"
                      " < StrictVersion('1.2') or "
                      "StrictVersion(onnxruntime.__version__)"
                      " <= StrictVersion('0.2.1')",
    )
Example 6: test_model_perceptron_binary_class_int
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_model_perceptron_binary_class_int(self):
    model, X = fit_classification_model(
        Perceptron(random_state=42), 2, is_int=True)
    model_onnx = convert_sklearn(
        model,
        "scikit-learn Perceptron binary classifier",
        [("input", Int64TensorType([None, X.shape[1]]))],
        target_opset=TARGET_OPSET
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X.astype(np.int64),
        model,
        model_onnx,
        basename="SklearnPerceptronClassifierBinaryInt-Out0",
        allow_failure="StrictVersion(onnx.__version__)"
                      " < StrictVersion('1.2') or "
                      "StrictVersion(onnxruntime.__version__)"
                      " <= StrictVersion('0.2.1')",
    )
Example 7: test_model_perceptron_multi_class_int
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_model_perceptron_multi_class_int(self):
    model, X = fit_classification_model(
        Perceptron(random_state=42), 5, is_int=True)
    model_onnx = convert_sklearn(
        model,
        "scikit-learn Perceptron multi-class classifier",
        [("input", Int64TensorType([None, X.shape[1]]))],
        target_opset=TARGET_OPSET
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X.astype(np.int64),
        model,
        model_onnx,
        basename="SklearnPerceptronClassifierMultiInt-Out0",
        allow_failure="StrictVersion(onnx.__version__)"
                      " < StrictVersion('1.2') or "
                      "StrictVersion(onnxruntime.__version__)"
                      " <= StrictVersion('0.2.1')",
    )
Example 8: test_fit
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_fit(example_estimate_competence, create_pool_classifiers):
    X, y = example_estimate_competence[0:2]
    knop_test = KNOP(create_pool_classifiers)
    knop_test.fit(X, y)
    expected_scores = np.array([[0.5, 0.5], [1.0, 0.0], [0.33, 0.67]])
    expected_scores = np.tile(expected_scores, (15, 1, 1))
    assert np.array_equal(expected_scores, knop_test.dsel_scores_)

    # Assert the roc_algorithm_ is fitted to the scores (decision space)
    # rather than the features (feature space)
    expected_roc_data = knop_test.dsel_scores_[:, :, 0]
    assert np.array_equal(knop_test.op_knn_._fit_X, expected_roc_data)

# Test if the class raises an error when the base classifiers do not
# implement the predict_proba method. It should raise an exception when the
# base classifier cannot estimate posterior probabilities (predict_proba).
# The Perceptron classifier is used here because it does not implement predict_proba.
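The trailing comments above refer to a check that is not shown in full here. As a minimal sketch of the fact they rely on, the snippet below verifies that sklearn's Perceptron exposes no predict_proba and shows one common workaround via probability calibration; the CalibratedClassifierCV step is an illustrative assumption, not part of the original test.
from sklearn.calibration import CalibratedClassifierCV
from sklearn.datasets import make_classification
from sklearn.linear_model import Perceptron

X, y = make_classification(random_state=0)
clf = Perceptron(tol=1e-3).fit(X, y)
# Perceptron only produces hard decisions, so predict_proba is absent.
assert not hasattr(clf, "predict_proba")

# Illustrative workaround: calibrate the Perceptron to obtain probabilities.
calibrated = CalibratedClassifierCV(Perceptron(tol=1e-3), cv=3).fit(X, y)
print(calibrated.predict_proba(X[:2]))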
Example 9: test_classification
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_classification():
    # Check classification for various parameter settings.
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(iris.data,
                                                        iris.target,
                                                        random_state=rng)
    grid = ParameterGrid({"max_samples": [0.5, 1.0],
                          "max_features": [1, 2, 4],
                          "bootstrap": [True, False],
                          "bootstrap_features": [True, False]})

    for base_estimator in [None,
                           DummyClassifier(),
                           Perceptron(tol=1e-3),
                           DecisionTreeClassifier(),
                           KNeighborsClassifier(),
                           SVC()]:
        for params in grid:
            BaggingClassifier(base_estimator=base_estimator,
                              random_state=rng,
                              **params).fit(X_train, y_train).predict(X_test)
Example 10: set_params
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def set_params(self, r=3, d=8, nbits=16, discrete=True,
               balance=False, subsample_size=200, ratio=2,
               normalization=False, inner_normalization=False,
               penalty='elasticnet'):
    """Setter."""
    self.r = r
    self.d = d
    self.nbits = nbits
    self.normalization = normalization
    self.inner_normalization = inner_normalization
    self.discrete = discrete
    self.balance = balance
    self.subsample_size = subsample_size
    self.ratio = ratio
    if penalty == 'perceptron':
        self.model = Perceptron(max_iter=5, tol=None)
    else:
        self.model = SGDClassifier(
            average=True, class_weight='balanced', shuffle=True,
            penalty=penalty, max_iter=5, tol=None)
    self.vectorizer = Vectorizer(
        r=self.r, d=self.d,
        normalization=self.normalization,
        inner_normalization=self.inner_normalization,
        discrete=self.discrete,
        nbits=self.nbits)
    return self
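The only role the Perceptron plays in the example above is the penalty branch: penalty='perceptron' selects a plain Perceptron, while any other value falls back to an SGDClassifier. A stripped-down sketch of just that selection logic follows; make_linear_model is a hypothetical helper and the surrounding Vectorizer setup is omitted.
from sklearn.linear_model import Perceptron, SGDClassifier

def make_linear_model(penalty='elasticnet'):
    # 'perceptron' selects the dedicated estimator; any other value
    # falls back to an SGDClassifier with the requested penalty.
    if penalty == 'perceptron':
        return Perceptron(max_iter=5, tol=None)
    return SGDClassifier(average=True, class_weight='balanced', shuffle=True,
                         penalty=penalty, max_iter=5, tol=None)

print(make_linear_model('perceptron'))
print(make_linear_model('l2'))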
Example 11: partial_fit
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def partial_fit(self, X, y, classes=None, sample_weight=None):
    """ partial_fit

    Calls the Perceptron partial_fit from sklearn.

    Parameters
    ----------
    X: numpy.ndarray of shape (n_samples, n_features)
        The feature matrix.

    y: Array-like
        The class labels for all samples in X.

    classes: Array-like, optional (default=None)
        Contains all possible class labels; forwarded to sklearn's partial_fit.

    sample_weight:
        Sample weights. If not provided, uniform weights are assumed.

    Returns
    -------
    PerceptronMask
        self
    """
    self.classifier.partial_fit(X=X, y=y, classes=classes, sample_weight=sample_weight)
    return self
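Since the wrapper above simply forwards to sklearn, a minimal sketch of incremental training with Perceptron.partial_fit directly may help; the five-batch split and parameter values are illustrative.
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import Perceptron

X, y = load_iris(return_X_y=True)
clf = Perceptron(tol=1e-3, random_state=0)
# The full label set must be known before all batches have been seen,
# so classes is passed on every partial_fit call here.
for batch in np.array_split(np.arange(len(X)), 5):
    clf.partial_fit(X[batch], y[batch], classes=np.unique(y))
print(clf.score(X, y))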
Example 12: test_base
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_base():
    # Check BaseEnsemble methods.
    ensemble = BaggingClassifier(
        base_estimator=Perceptron(tol=1e-3, random_state=None), n_estimators=3)

    iris = load_iris()
    ensemble.fit(iris.data, iris.target)
    ensemble.estimators_ = []  # empty the list and create estimators manually
    ensemble._make_estimator()
    random_state = np.random.RandomState(3)
    ensemble._make_estimator(random_state=random_state)
    ensemble._make_estimator(random_state=random_state)
    ensemble._make_estimator(append=False)

    assert_equal(3, len(ensemble))
    assert_equal(3, len(ensemble.estimators_))

    assert isinstance(ensemble[0], Perceptron)
    assert_equal(ensemble[0].random_state, None)
    assert isinstance(ensemble[1].random_state, int)
    assert isinstance(ensemble[2].random_state, int)
    assert_not_equal(ensemble[1].random_state, ensemble[2].random_state)

    np_int_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
                                        n_estimators=np.int32(3))
    np_int_ensemble.fit(iris.data, iris.target)
Example 13: test_base_zero_n_estimators
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_base_zero_n_estimators():
    # Check that instantiating a BaseEnsemble with n_estimators<=0 raises
    # a ValueError.
    ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
                                 n_estimators=0)
    iris = load_iris()
    assert_raise_message(ValueError,
                         "n_estimators must be greater than zero, got 0.",
                         ensemble.fit, iris.data, iris.target)
Example 14: test_base_not_int_n_estimators
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_base_not_int_n_estimators():
    # Check that instantiating a BaseEnsemble with a string as n_estimators
    # raises a ValueError demanding n_estimators to be supplied as an integer.
    string_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
                                        n_estimators='3')
    iris = load_iris()
    assert_raise_message(ValueError,
                         "n_estimators must be an integer",
                         string_ensemble.fit, iris.data, iris.target)

    float_ensemble = BaggingClassifier(base_estimator=Perceptron(tol=1e-3),
                                       n_estimators=3.0)
    assert_raise_message(ValueError,
                         "n_estimators must be an integer",
                         float_ensemble.fit, iris.data, iris.target)
Example 15: test_perceptron_accuracy
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Perceptron [as alias]
def test_perceptron_accuracy():
    for data in (X, X_csr):
        clf = Perceptron(max_iter=100, tol=None, shuffle=False)
        clf.fit(data, y)
        score = clf.score(data, y)
        assert_greater(score, 0.7)