This article collects typical usage examples of the Classifier class from the Python module mvpa2.clfs.base, illustrating what Classifier is for and how it is used in practice.
The 15 code examples below are drawn from open source projects and are ordered by popularity by default.
Example 1: __init__
def __init__(self, k=2, dfx=squared_euclidean_distance,
             voting='weighted', **kwargs):
    """
    Parameters
    ----------
    k : unsigned integer
        Number of nearest neighbours to be used for voting.
    dfx : functor
        Function to compute the distances between training and test samples.
        Default: squared euclidean distance
    voting : str
        Voting method used to derive predictions from the nearest neighbors.
        Possible values are 'majority' (simple majority of classes
        determines vote) and 'weighted' (votes are weighted according to the
        relative frequencies of each class in the training data).
    **kwargs
        Additional arguments are passed to the base class.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    self.__k = k
    self.__dfx = dfx
    self.__voting = voting
    self.__data = None
    self.__weights = None
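A minimal usage sketch for this constructor, assuming it is the __init__ of PyMVPA's kNN class in mvpa2.clfs.knn; the synthetic dataset and parameter choices here are illustrative, not from the original source:

import numpy as np
from mvpa2.clfs.knn import kNN
from mvpa2.datasets.base import dataset_wizard

# two noisy classes in a 2-D feature space
samples = np.vstack([np.random.randn(20, 2),
                     np.random.randn(20, 2) + 3])
targets = ['a'] * 20 + ['b'] * 20
ds = dataset_wizard(samples, targets=targets)

clf = kNN(k=3, voting='majority')  # override the defaults shown above
clf.train(ds)
predictions = clf.predict(ds.samples)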
Example 2: __init__
def __init__(self, skl_learner, tags=None, enforce_dim=None,
             **kwargs):
    """
    Parameters
    ----------
    skl_learner
        Existing instance of a learner from skl. It should
        implement `fit` and `predict`. If `predict_proba` is
        available in the interface, then conditional attribute
        `probabilities` becomes available as well
    tags : list of string
        What additional tags to attach to this learner. Tags are
        used in the queries to classifier or regression warehouses.
    enforce_dim : None or int, optional
        If not None, it would enforce given dimensionality for
        ``predict`` call, if all other trailing dimensions are
        degenerate.
    """
    self._skl_learner = skl_learner
    self.enforce_dim = enforce_dim
    if tags:
        # So we make a per-instance copy
        self.__tags__ = self.__tags__ + tags
    Classifier.__init__(self, **kwargs)
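This constructor looks like PyMVPA's scikit-learn adapter (SKLLearnerAdapter in mvpa2.clfs.skl.base); assuming that, a sketch of wrapping an arbitrary scikit-learn estimator:

from sklearn.linear_model import LogisticRegression
from mvpa2.clfs.skl.base import SKLLearnerAdapter

# any estimator exposing fit/predict can be wrapped;
# predict_proba additionally enables the 'probabilities' attribute
clf = SKLLearnerAdapter(LogisticRegression(), tags=['linear'])
clf.train(ds)                  # ds as built in the kNN sketch above
print(clf.predict(ds.samples))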
Example 3: __init__
def __init__(self, lm=None, **kwargs):
    """
    Initialize a ridge regression analysis.

    Parameters
    ----------
    lm : float
        the penalty term lambda.
        (Defaults to .05*nFeatures)
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint happiness
    self.w = None

    # It does not make sense to calculate a confusion matrix for a
    # ridge regression
    self.ca.enable('training_stats', False)

    # verify that they specified lambda
    self.__lm = lm

    # store train method config
    self.__implementation = 'direct'
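A usage sketch, assuming this constructor belongs to PyMVPA's RidgeReg in mvpa2.clfs.ridge; the synthetic regression data and lambda value are illustrative:

import numpy as np
from mvpa2.clfs.ridge import RidgeReg
from mvpa2.datasets.base import dataset_wizard

X = np.random.randn(30, 5)
w_true = np.array([1., 0., 2., 0., -1.])
y = np.dot(X, w_true) + 0.1 * np.random.randn(30)
ds = dataset_wizard(X, targets=y)

reg = RidgeReg(lm=0.5)  # explicit lambda; lm=None would default to .05 * n_features
reg.train(ds)
y_hat = reg.predict(ds.samples)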
Example 4: __init__
def __init__(self, **kwargs):
    """Initialize an SMLR classifier.
    """
    """
    TODO:
     # Add in likelihood calculation
     # Add kernels, not just direct methods.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    if _cStepwiseRegression is None and self.params.implementation == 'C':
        warning('SMLR: C implementation is not available.'
                ' Using pure Python one')
        self.params.implementation = 'Python'

    # pylint friendly initializations
    self._ulabels = None
    """Unique labels from the training set."""
    self.__weights_all = None
    """Contains all weights including bias values"""
    self.__weights = None
    """Just the weights, without the biases"""
    self.__biases = None
    """The biases, will remain None if has_bias is False"""
Example 5: __init__
def __init__(self, sigma_p=None, sigma_noise=1.0, **kwargs):
    """Initialize a BLR regression analysis.

    Parameters
    ----------
    sigma_noise : float
        the standard deviation of the Gaussian noise.
        (Defaults to 1.0)
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint happiness
    self.w = None

    # It does not make sense to calculate a confusion matrix for a
    # BLR:
    self.ca.enable('training_stats', False)

    # set the prior on w: N(0, sigma_p), specifying the covariance
    # sigma_p on w:
    self.sigma_p = sigma_p

    # set noise level:
    self.sigma_noise = sigma_noise

    self.ca.predicted_variances = None
    self.ca.log_marginal_likelihood = None
    # Yarik: what was those about??? just for future in
    # compute_log_marginal_likelihood ?
    # self.targets = None
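A sketch of running the model, assuming this is PyMVPA's BLR in mvpa2.clfs.blr; enabling the conditional attribute follows PyMVPA's generic ca.enable pattern and is illustrative:

from mvpa2.clfs.blr import BLR

reg = BLR(sigma_noise=1.0)            # sigma_p=None keeps the internal default prior
reg.ca.enable('predicted_variances')  # ask for per-sample predictive variances
reg.train(ds)                         # ds with continuous targets, as in the ridge sketch
mu = reg.predict(ds.samples)
var = reg.ca.predicted_variances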
Example 6: __init__
def __init__(self, lm=1, criterion=1, reduced=0.0, maxiter=20, **kwargs):
    """
    Initialize a penalized logistic regression analysis.

    Parameters
    ----------
    lm : int
        the penalty term lambda.
    criterion : int
        the criterion applied to judge convergence.
    reduced : float
        if not 0, the rank of the data is reduced before
        performing the calculations. In that case, ``reduced`` is taken
        as the fraction of the first singular value, at which a
        dimension is not considered significant anymore. A
        reasonable criterion is reduced=0.01
    maxiter : int
        maximum number of iterations. If no convergence occurs
        after this number of iterations, an exception is raised.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    self.__lm = lm
    self.__criterion = criterion
    self.__reduced = reduced
    self.__maxiter = maxiter
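A usage sketch, assuming this is PyMVPA's PLR in mvpa2.clfs.plr, which handles binary problems; the parameter values are illustrative:

from mvpa2.clfs.plr import PLR

# drop dimensions below 1% of the first singular value, allow more iterations
clf = PLR(lm=1, reduced=0.01, maxiter=50)
clf.train(ds)            # ds with two classes, as in the kNN sketch
predictions = clf.predict(ds.samples)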
Example 7: __init__
def __init__(self, model_type="lasso", trace=False, normalize=True,
             intercept=True, max_steps=None, use_Gram=False, **kwargs):
    """
    Initialize LARS.

    See the help in R for further details on the following parameters:

    Parameters
    ----------
    model_type : string
        Type of LARS to run. Can be one of ('lasso', 'lar',
        'forward.stagewise', 'stepwise').
    trace : boolean
        Whether to print progress in R as it works.
    normalize : boolean
        Whether to normalize the L2 Norm.
    intercept : boolean
        Whether to add a non-penalized intercept to the model.
    max_steps : None or int
        If not None, specify the total number of iterations to run. Each
        iteration adds a feature, but leaving it None will add until
        convergence.
    use_Gram : boolean
        Whether to compute the Gram matrix (this should be false if you
        have more features than samples).
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    if model_type not in known_models:
        raise ValueError('Unknown model %s for LARS is specified. Known '
                         'are %s' % (model_type, known_models))

    # set up the params
    self.__type = model_type
    self.__normalize = normalize
    self.__intercept = intercept
    self.__trace = trace
    self.__max_steps = max_steps
    self.__use_Gram = use_Gram

    # pylint friendly initializations
    self.__lowest_Cp_step = None
    self.__weights = None
    """The beta weights for each feature."""
    self.__trained_model = None
    """The model object after training that will be used for
    predictions."""
Example 8: __repr__
def __repr__(self):
    """String representation of `SKLLearnerWrapper`
    """
    prefixes = [repr(self._skl_learner)]
    if self.__tags__ != ['skl']:
        prefixes += ['tags=%r' % [t for t in self.__tags__ if t != 'skl']]
    prefixes += _repr_attrs(self, ['enforce_dim'])
    return Classifier.__repr__(self, prefixes=prefixes)
Example 9: __init__
def __init__(self, **kwargs):
    """
    Initialize GLM-Net.

    See the help in R for further details on the parameters
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint friendly initializations
    self._utargets = None
    self.__weights = None
    """The beta weights for each feature."""
    self.__trained_model = None
    """The model object after training that will be used for
    predictions."""
    self.__last_lambda = None
    """Lambda obtained on the last step"""
Example 10: __init__
def __init__(self, **kwargs):
    """Initialize a GNB classifier.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint friendly initializations
    self.means = None
    """Means of features per class"""
    self.variances = None
    """Variances per class, but "vars" is taken ;)"""
    self.ulabels = None
    """Labels classifier was trained on"""
    self.priors = None
    """Class probabilities"""

    # Define internal state of classifier
    self._norm_weight = None
Example 11: __init__
def __init__(self, kernel=None, **kwargs):
    """Initialize a GPR regression analysis.

    Parameters
    ----------
    kernel : Kernel
        a kernel object defining the covariance between instances.
        (Defaults to SquaredExponentialKernel if None in arguments)
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # It does not make sense to calculate a confusion matrix for a GPR
    # XXX it does ;) it will be a RegressionStatistics actually ;-)
    # So if someone desires -- let him have it
    # self.ca.enable('training_stats', False)

    # set kernel:
    if kernel is None:
        kernel = SquaredExponentialKernel()
        debug("GPR",
              "No kernel was provided, falling back to default: %s"
              % kernel)
    self.__kernel = kernel

    # append proper clf_internal depending on the kernel
    # TODO: add "__tags__" to kernels since the check
    #       below does not scale
    if isinstance(kernel, GeneralizedLinearKernel) or \
       isinstance(kernel, LinearKernel):
        self.__tags__ += ['linear']
    else:
        self.__tags__ += ['non-linear']

    if externals.exists('openopt') \
       and 'has_sensitivity' not in self.__tags__:
        self.__tags__ += ['has_sensitivity']

    # No need to initialize conditional attributes. Unless they got set
    # they would raise an exception:
    # self.predicted_variances = None
    # self.log_marginal_likelihood = None
    self._init_internals()
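A usage sketch, assuming this is PyMVPA's GPR in mvpa2.clfs.gpr, with kernels from mvpa2.kernels.np; the kernel choice is illustrative:

from mvpa2.clfs.gpr import GPR
from mvpa2.kernels.np import LinearKernel

reg = GPR(kernel=LinearKernel())  # a linear kernel earns the 'linear' tag above
reg.train(ds)                     # ds with continuous targets, as in the ridge sketch
y_hat = reg.predict(ds.samples)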
Example 12: __init__
def __init__(self, lm=1.0, trace=False, normalize=True,
             intercept=True, max_steps=None, **kwargs):
    """
    Initialize ENET.

    See the help in R for further details on the following parameters:

    Parameters
    ----------
    lm : float
        Penalty parameter. 0 will perform LARS with no ridge regression.
        Default is 1.0.
    trace : boolean
        Whether to print progress in R as it works.
    normalize : boolean
        Whether to normalize the L2 Norm.
    intercept : boolean
        Whether to add a non-penalized intercept to the model.
    max_steps : None or int
        If not None, specify the total number of iterations to run. Each
        iteration adds a feature, but leaving it None will add until
        convergence.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # set up the params
    self.__lm = lm
    self.__normalize = normalize
    self.__intercept = intercept
    self.__trace = trace
    self.__max_steps = max_steps

    # pylint friendly initializations
    self.__weights = None
    """The beta weights for each feature."""
    self.__trained_model = None
    """The model object after training that will be used for
    predictions."""

    # It does not make sense to calculate a confusion matrix for a
    # regression
    self.ca.enable('training_stats', False)
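A sketch, assuming this is PyMVPA's R-backed ENET in mvpa2.clfs.enet, which requires rpy2 and the R 'elasticnet' package; the parameter values are illustrative:

from mvpa2.clfs.enet import ENET

# elastic net with a moderate penalty, capped at 20 LARS steps
reg = ENET(lm=0.5, max_steps=20)
reg.train(ds)            # ds with continuous targets, as in the ridge sketch
y_hat = reg.predict(ds.samples)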
Example 13: __init__
def __init__(self, **kwargs):
    """Initialize a GDA classifier.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint friendly initializations
    self.means = None
    """Means of features per class"""
    self.cov = None
    """Co-variances per class, but "vars" is taken ;)"""
    self.ulabels = None
    """Labels classifier was trained on"""
    self.priors = None
    """Class probabilities"""
    self.nsamples_per_class = None
    """Number of samples per class - used by derived classes"""

    # Define internal state of classifier
    self._norm_weight = None
Example 14: __init__
def __init__(self, **kwargs):
    """Initialize a GNB classifier.
    """
    # init base class first
    Classifier.__init__(self, **kwargs)

    # pylint friendly initializations
    self.means = None
    """Means of features per class"""
    self.variances = None
    """Variances per class, but "vars" is taken ;)"""
    self.ulabels = None
    """Labels classifier was trained on"""
    self.priors = None
    """Class probabilities"""

    # Define internal state of classifier
    self._norm_weight = None

    # Add 'has_sensitivity' tag if classifier is linear
    if self.params.common_variance:
        self.__tags__ = self.__tags__ + ['has_sensitivity']
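A sketch, assuming this is PyMVPA's GNB in mvpa2.clfs.gnb: with a shared per-class variance the decision boundary is linear, which is what unlocks the sensitivity tag above. The analyzer call follows PyMVPA's generic get_sensitivity_analyzer interface and is illustrative:

from mvpa2.clfs.gnb import GNB

clf = GNB(common_variance=True)  # shared variance makes the model linear
clf.train(ds)                    # ds with class targets, as in the kNN sketch
if 'has_sensitivity' in clf.__tags__:
    sens = clf.get_sensitivity_analyzer()(ds)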
Example 15: __init__
def __init__(self, learner, kwargs=None, kwargs_predict=None,
             tags=None, **kwargs_):
    """
    Parameters
    ----------
    learner : string
    kwargs : dict, optional
    kwargs_predict : dict, optional
    tags : list of string
        What additional tags to attach to this classifier. Tags are
        used in the queries to classifier or regression warehouses.
    """
    self._learner = learner
    self._kwargs = kwargs or {}
    self._kwargs_predict = kwargs_predict or {}
    if tags:
        # So we make a per-instance copy
        self.__tags__ = self.__tags__ + tags
    Classifier.__init__(self, **kwargs_)