本文整理汇总了Python中sklearn.kernel_approximation.Nystroem类的典型用法代码示例。如果您正苦于以下问题:Python Nystroem类的具体用法?Python Nystroem怎么用?Python Nystroem使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Nystroem类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_nystroem_callable
def test_nystroem_callable():
    """Nystroem must accept a callable kernel and reject SVM-style params."""
    rng = np.random.RandomState(42)
    n_samples = 10
    X = rng.uniform(size=(n_samples, 4))

    def logging_histogram_kernel(x, y, log):
        """Histogram kernel that writes to a log."""
        log.append(1)
        return np.minimum(x, y).sum()

    kernel_log = []
    X = list(X)  # test input validation
    Nystroem(kernel=logging_histogram_kernel,
             n_components=(n_samples - 1),
             kernel_params={'log': kernel_log}).fit(X)
    # Each unique off-diagonal pair is evaluated once.
    assert_equal(len(kernel_log), n_samples * (n_samples - 1) / 2)

    def linear_kernel(X, Y):
        return np.dot(X, Y.T)

    # if degree, gamma or coef0 is passed, we raise a ValueError
    msg = "Don't pass gamma, coef0 or degree to Nystroem"
    for extra in ({'gamma': 1}, {'coef0': 1}, {'degree': 2}):
        approximator = Nystroem(kernel=linear_kernel, **extra)
        with pytest.raises(ValueError, match=msg):
            approximator.fit(X)
示例2: test_nystroem_poly_kernel_params
def test_nystroem_poly_kernel_params():
    """Non-regression: Nystroem should pass other parameters beside gamma."""
    rng = np.random.RandomState(37)
    X = rng.uniform(size=(10, 4))
    expected = polynomial_kernel(X, degree=3.1, coef0=.1)
    transformer = Nystroem(kernel="polynomial", n_components=X.shape[0],
                           degree=3.1, coef0=.1)
    embedded = transformer.fit_transform(X)
    # Full-rank approximation reproduces the exact polynomial kernel.
    assert_array_almost_equal(np.dot(embedded, embedded.T), expected)
示例3: SparseKernelClassifier
class SparseKernelClassifier(CDClassifier):
    """L1-penalized squared-hinge classifier trained on a kernel expansion.

    With ``mode='exact'`` the full pairwise kernel matrix against the training
    data is used as the feature representation; with any other mode a Nystroem
    feature map with ``n_components`` landmarks approximates it.
    """

    def __init__(self, mode='exact', kernel='rbf', gamma=1e-3, C=1, alpha=1,
                 n_components=500, n_jobs=1, verbose=False):
        self.mode = mode
        self.kernel = kernel
        self.gamma = gamma
        self.C = C
        self.alpha = alpha
        self.n_components = n_components
        self.n_jobs = n_jobs
        self.verbose = verbose
        super(SparseKernelClassifier, self).__init__(
            C=C,
            alpha=alpha,
            loss='squared_hinge',
            penalty='l1',
            multiclass=False,
            debiasing=True,
            Cd=C,
            warm_debiasing=True,
            n_jobs=n_jobs,
            verbose=False,
        )

    def fit(self, X, y):
        """Fit the linear model on the (exact or approximate) kernel features."""
        if self.mode == 'exact':
            # Keep the training data: decision_function needs it to build
            # the cross-kernel against new samples.
            self.X_train_ = X
            K = pairwise_kernels(
                X,
                metric=self.kernel,
                filter_params=True,
                gamma=self.gamma,
            )
        else:
            self.kernel_sampler_ = Nystroem(
                kernel=self.kernel,
                gamma=self.gamma,
                n_components=self.n_components,
            )
            K = self.kernel_sampler_.fit_transform(X)
        super(SparseKernelClassifier, self).fit(K, y)
        return self

    def decision_function(self, X):
        """Decision values for X, computed in the same feature space as fit."""
        if self.mode == 'exact':
            K = pairwise_kernels(
                X, self.X_train_,
                metric=self.kernel,
                filter_params=True,
                gamma=self.gamma,
            )
        else:
            K = self.kernel_sampler_.transform(X)
        return super(SparseKernelClassifier, self).decision_function(K)
示例4: test_nystroem_vs_sklearn
def test_nystroem_vs_sklearn():
    """The local Nystroem must agree with sklearn's reference implementation."""
    np.random.seed(42)
    data = np.random.randn(100, 5)
    ours = Nystroem(kernel='linear', random_state=42)
    reference = NystroemR(kernel='linear', random_state=42)
    # The local variant takes a list of datasets and returns one per entry.
    embedded_ours = ours.fit_transform([data])[0]
    embedded_ref = reference.fit_transform(data)
    assert_array_almost_equal(embedded_ours, embedded_ref)
示例5: WeightedSparseKernelClassifier
class WeightedSparseKernelClassifier(LinearSVC):
    """L1-penalized linear SVC trained on an (exact or approximate) kernel map.

    ``mode='exact'`` feeds the full pairwise kernel matrix to LinearSVC; any
    other mode uses a Nystroem approximation with ``n_components`` landmarks.
    """

    def __init__(
        self, mode='exact', kernel='rbf', gamma=1e-3, C=1,
        multi_class='ovr', class_weight='auto', n_components=5000,
        verbose=False
    ):
        self.mode = mode
        self.kernel = kernel
        self.gamma = gamma
        self.C = C
        self.multi_class = multi_class
        self.class_weight = class_weight
        self.n_components = n_components
        self.verbose = verbose
        super(WeightedSparseKernelClassifier, self).__init__(
            C=C,
            loss='squared_hinge',
            penalty='l1',
            dual=False,
            verbose=verbose,
        )

    def fit(self, X, y):
        """Fit LinearSVC on the kernel representation of X."""
        if self.mode == 'exact':
            # Training data is retained so decision_function can form the
            # cross-kernel against new samples.
            self.X_train_ = X
            K = pairwise_kernels(
                X,
                metric=self.kernel,
                filter_params=True,
                gamma=self.gamma,
            )
        else:
            self.kernel_sampler_ = Nystroem(
                kernel=self.kernel,
                gamma=self.gamma,
                n_components=self.n_components,
            )
            K = self.kernel_sampler_.fit_transform(X)
        return super(WeightedSparseKernelClassifier, self).fit(K, y)

    def decision_function(self, X):
        """Decision values for X in the same feature space used by fit."""
        if self.mode == 'exact':
            K = pairwise_kernels(
                X, self.X_train_,
                metric=self.kernel,
                filter_params=True,
                gamma=self.gamma,
            )
        else:
            K = self.kernel_sampler_.transform(X)
        return super(WeightedSparseKernelClassifier, self).decision_function(K)
示例6: ApplyNystroemOnKernelMatrix
def ApplyNystroemOnKernelMatrix(x, kernelFn, nComponents):
    """
    Given a data matrix (each row is an observation, each column is a variable)
    and a kernel function, compute the Nystroem approximation of its uncentered
    kernel matrix.
    :param x: numpy matrix. Data matrix.
    :param kernelFn: callable function. Returned by calling KernelSelector().
    :param nComponents: integer. Number of ranks retained in Nystroem method.
    :return
        numpy matrix.
    """
    sampler = Nystroem(kernelFn, n_components=nComponents)
    features = sampler.fit_transform(x)
    return np.matrix(features)
示例7: test_nystroem_singular_kernel
def test_nystroem_singular_kernel():
    """Nystroem must cope with a singular (rank-deficient) kernel matrix."""
    rng = np.random.RandomState(0)
    X = rng.rand(10, 20)
    X = np.vstack([X] * 2)  # duplicate samples -> singular kernel matrix
    gamma = 100
    N = Nystroem(gamma=gamma, n_components=X.shape[0]).fit(X)
    X_transformed = N.transform(X)
    K = rbf_kernel(X, gamma=gamma)
    assert_array_almost_equal(K, np.dot(X_transformed, X_transformed.T))
    # BUG FIX: the original asserted on an undefined name `Y` (NameError);
    # the finiteness check must apply to the transformed features.
    assert_true(np.all(np.isfinite(X_transformed)))
示例8: gram_Nystroem
def gram_Nystroem(self, x, nComponents):
    """
    Nystroem approximation of the kernel matrix given data. No centering.
    :type x: 2d array, with size n * p
    :param x: data matrix for the covariates belonging to the same group,
        associated with the given matrix.
    :type nComponents: int
    :param nComponents: number of rank to retain
    :return: approximated kernel matrix with reduced rank,
        with size n * nComponents
    """
    # self.fn is the kernel callable configured on this instance.
    approximator = Nystroem(self.fn, n_components=nComponents)
    return approximator.fit_transform(x)
示例9: fit
def fit(self, X, Y, weights=None, context_transform=True):
    """ Trains policy by weighted maximum likelihood.
    .. note:: This call changes this policy (self)
    Parameters
    ----------
    X: array-like, shape (n_samples, context_dims)
        Context vectors
    Y: array-like, shape (n_samples, weight_dims)
        Low-level policy parameter vectors
    weights: array-like, shape (n_samples,)
        Weights of individual samples (should depend on the obtained
        reward)
    """
    # Kernel approximation: never request more components than samples.
    n_components = np.minimum(X.shape[0], self.n_components)
    self.nystroem = Nystroem(
        kernel=self.kernel,
        gamma=self.gamma,
        coef0=self.coef0,
        n_components=n_components,
        random_state=self.random_state,
    )
    features = self.nystroem.fit_transform(X)
    if self.bias:
        # Append a constant column so the ridge fit absorbs an intercept.
        features = np.hstack((features, np.ones((features.shape[0], 1))))
    if self.normalize:
        # L1-normalize each row in place.
        features /= np.abs(features).sum(1)[:, None]
    self.X = features
    # Standard ridge regression (intercept handled via the bias column).
    ridge = Ridge(alpha=self.alpha, fit_intercept=False)
    ridge.fit(self.X, Y, weights)
    self.W = ridge.coef_
示例10: test_nystroem_default_parameters
def test_nystroem_default_parameters():
    """Default gamma values must match each kernel's documented default."""
    rng = np.random.RandomState(42)
    X = rng.uniform(size=(10, 4))

    def _assert_matches(transformer, expected_kernel):
        embedded = transformer.fit_transform(X)
        assert_array_almost_equal(expected_kernel,
                                  np.dot(embedded, embedded.T))

    # rbf kernel should behave as gamma=None by default
    # aka gamma = 1 / n_features
    _assert_matches(Nystroem(n_components=10), rbf_kernel(X, gamma=None))
    # chi2 kernel should behave as gamma=1 by default
    _assert_matches(Nystroem(kernel='chi2', n_components=10),
                    chi2_kernel(X, gamma=1))
示例11: test_nystrom_approximation
def test_nystrom_approximation():
    """Basic sanity checks for the Nystroem approximation."""
    rnd = np.random.RandomState(0)
    X = rnd.uniform(size=(10, 4))
    # With n_components = n_samples this is exact
    X_transformed = Nystroem(n_components=X.shape[0]).fit_transform(X)
    K = rbf_kernel(X)
    assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)
    trans = Nystroem(n_components=2, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))

    # test callable kernel
    # FIX: use a def instead of a lambda bound to a name (PEP 8 / E731),
    # consistent with the sibling test_nystroem_approximation variant.
    def linear_kernel(X, Y):
        return np.dot(X, Y.T)

    trans = Nystroem(n_components=2, kernel=linear_kernel, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))
示例12: test_lndmrk_nystroem_approximation
def test_lndmrk_nystroem_approximation():
    """LandmarkNystroem: default sampling matches sklearn; landmarks stay close."""
    np.random.seed(42)
    data = np.random.randn(100, 5)
    idx_a = np.arange(data.shape[0])[5::1]
    idx_b = np.arange(data.shape[0])[::1][:idx_a.shape[0]]
    landmarks = data[np.unique((idx_a, idx_b))]

    approximator = LandmarkNystroem(kernel='rbf', random_state=42)
    reference = NystroemR(kernel='rbf', random_state=42)

    default_embedding = approximator.fit_transform([data])[0]
    approximator.landmarks = landmarks
    landmark_embedding = approximator.fit_transform([data])[0]
    expected = reference.fit_transform(data)

    # Without explicit landmarks the result matches sklearn's Nystroem.
    assert_array_almost_equal(expected, default_embedding)
    # With landmarks, not every entry differs from the reference by > 1e-6.
    assert not all((np.abs(expected - landmark_embedding) > 1E-6).flatten())
示例13: __init__
class NystromScikit:
    """
    Nystrom implementation from the Scikit-Learn wrapper.
    The main difference is in the selection of inducing inputs.
    """

    def __init__(self, rank=10, random_state=42):
        """
        :param rank: (``int``) Maximal decomposition rank.
        :param random_state: (``int``) Random generator seed.
        """
        self.trained = False
        self.rank = rank
        self.random_state = random_state

    def fit(self, K, y):
        """
        Fit approximation to the kernel function / matrix.
        :param K: (``Kinterface``) The kernel to be approximated with G.
        :param y: (``numpy.ndarray``) Class labels or regression targets
            (forwarded to the underlying model's fit).
        """
        assert isinstance(K, Kinterface)
        self.n = K.shape[0]

        def kernel(a, b):
            # Delegate to the wrapped kernel with its configured arguments.
            return K.kernel(a, b, **K.kernel_args)

        self.model = Nystroem(kernel=kernel,
                              n_components=self.rank,
                              random_state=self.random_state)
        self.model.fit(K.data, y)
        # The first `rank` sampled columns form the active (inducing) set.
        self.active_set_ = list(self.model.component_indices_[:self.rank])
        assert len(set(self.active_set_)) == len(self.active_set_) == self.rank
        R = self.model.normalization_
        # G = K[:, active] @ R gives the low-rank factor of the kernel.
        self.G = K[:, self.active_set_].dot(R)
        self.trained = True
示例14: test_nystroem_approximation
def test_nystroem_approximation():
# some basic tests
rnd = np.random.RandomState(0)
X = rnd.uniform(size=(10, 4))
# With n_components = n_samples this is exact
X_transformed = Nystroem(n_components=X.shape[0]).fit_transform(X)
K = rbf_kernel(X)
assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)
trans = Nystroem(n_components=2, random_state=rnd)
X_transformed = trans.fit(X).transform(X)
assert_equal(X_transformed.shape, (X.shape[0], 2))
# test callable kernel
def linear_kernel(X, Y):
return np.dot(X, Y.T)
trans = Nystroem(n_components=2, kernel=linear_kernel, random_state=rnd)
X_transformed = trans.fit(X).transform(X)
assert_equal(X_transformed.shape, (X.shape[0], 2))
# test that available kernels fit and transform
kernels_available = kernel_metrics()
for kern in kernels_available:
trans = Nystroem(n_components=2, kernel=kern, random_state=rnd)
X_transformed = trans.fit(X).transform(X)
assert_equal(X_transformed.shape, (X.shape[0], 2))
示例15: LSH
class LSH():
    """Discrete LSH applied on top of a Nystroem RBF feature embedding."""

    def __init__(self, r=0.1, num_functions=50, dimensionality=128, gamma=1):
        # Delegate to set_params so construction and reconfiguration share
        # one code path.
        self.set_params(r, num_functions, dimensionality, gamma)

    def set_params(self, r=0.1, num_functions=50, dimensionality=128, gamma=1):
        self.feature_map_LSH = discreteLSH(r, num_functions, dimensionality)
        self.feature_map_nystroem = Nystroem(kernel='rbf', gamma=gamma,
                                             n_components=dimensionality)

    def transform(self, X):
        # Note: the Nystroem map is (re)fitted on every transform call,
        # matching the original behavior.
        embedded = self.feature_map_nystroem.fit_transform(X)
        return self.feature_map_LSH.transform(embedded)