This article collects typical usage examples of the Python class shogun.Kernel.GaussianKernel. If you are wondering what GaussianKernel is for, how to use it, or are looking for working examples, the selected class code examples below may help.
The following shows 15 code examples of the GaussianKernel class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
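Most of the examples below follow the same pattern: build a GaussianKernel on the training features, train a machine with it, then re-initialize the kernel on (train, test) features before calling apply(). As a quick orientation, here is a minimal sketch of that pattern; the traindat/testdat arrays are placeholder random data, not taken from any particular example:

import numpy as np
from shogun.Features import RealFeatures
from shogun.Kernel import GaussianKernel

traindat = np.random.randn(2, 10)   # placeholder: 2 features x 10 training examples
testdat = np.random.randn(2, 5)     # placeholder: 2 features x 5 test examples
feats_train = RealFeatures(traindat)
feats_test = RealFeatures(testdat)
kernel = GaussianKernel(feats_train, feats_train, 1.9)  # third argument is the kernel width
km_train = kernel.get_kernel_matrix()                   # train-vs-train kernel matrix
kernel.init(feats_train, feats_test)                    # switch to train-vs-test
km_test = kernel.get_kernel_matrix()                    # train-vs-test kernel matrix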
Example 1: classify
def classify(classifier, features, labels, C=5, kernel_name=None, kernel_args=None):
    from shogun.Features import RealFeatures
    from shogun.Kernel import GaussianKernel
    import numpy as np
    sigma = 10000
    kernel = GaussianKernel(features, features, sigma)
    # TODO
    # kernel = LinearKernel(features, features)
    # kernel = PolyKernel(features, features, 50, 2)
    # kernel = kernels[kernel_name](features, features, *kernel_args)
    svm = classifier(C, kernel, labels)
    svm.train(features)
    x_size = 640
    y_size = 400
    size = 100
    x1 = np.linspace(0, x_size, size)
    y1 = np.linspace(0, y_size, size)
    x, y = np.meshgrid(x1, y1)
    test = RealFeatures(np.array((np.ravel(x), np.ravel(y))))
    kernel.init(features, test)
    out = svm.apply(test).get_values()
    if not len(out):
        out = svm.apply(test).get_labels()
    z = out.reshape((size, size))
    z = np.transpose(z)
    return x, y, z
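classify() returns a mesh grid plus the classifier output on it, which is convenient for plotting a decision surface. A hypothetical call is sketched below; the random data, the choice of LibSVM as the classifier, and the matplotlib plotting are illustrative assumptions, not part of the original code:

import numpy as np
import matplotlib.pyplot as plt
from shogun.Features import RealFeatures, BinaryLabels
from shogun.Classifier import LibSVM

# placeholder 2-d points inside the 640x400 window assumed by classify()
data = np.random.rand(2, 40) * np.array([[640.0], [400.0]])
lab = np.where(np.random.rand(40) > 0.5, 1.0, -1.0)
features = RealFeatures(data)
labels = BinaryLabels(lab)
x, y, z = classify(LibSVM, features, labels, C=5)
plt.pcolormesh(x, y, z)   # decision surface over the grid
plt.show()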
Example 2: mlprocess
def mlprocess(task_filename, data_filename, pred_filename, verbose=True):
    """Demo of creating a machine learning process."""
    task_type, fidx, lidx, train_idx, test_idx = parse_task(task_filename)
    outputs = init_output(task_type)
    all_data = parse_data(data_filename)
    train_ex, train_lab, test_ex, test_lab = split_data(all_data, fidx, lidx, train_idx, test_idx)
    label_train = outputs.str2label(train_lab)
    if verbose:
        print('Number of features: %d' % train_ex.shape[0])
        print('%d training examples, %d test examples' % (len(train_lab), len(test_lab)))
    feats_train = RealFeatures(train_ex)
    feats_test = RealFeatures(test_ex)
    width = 1.0
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train)
    svm = init_svm(task_type, kernel, labels)
    svm.train()
    kernel.init(feats_train, feats_test)
    preds = svm.classify().get_labels()
    pred_label = outputs.label2str(preds)
    pf = open(pred_filename, 'w')
    for pred in pred_label:
        pf.write(pred + '\n')
    pf.close()
Example 3: statistics_kmm
def statistics_kmm(n, d):
    from shogun.Features import RealFeatures
    from shogun.Features import DataGenerator
    from shogun.Kernel import GaussianKernel, MSG_DEBUG
    from shogun.Statistics import KernelMeanMatching
    from shogun.Mathematics import Math
    from numpy import random, array, int32
    # init seed for reproducibility
    Math.init_random(1)
    random.seed(1)
    data = random.randn(d, n)
    # create shogun feature representation
    features = RealFeatures(data)
    # use a kernel width of sigma=2, which is 8 in SHOGUN's parametrization,
    # i.e. k(x,y)=exp(-||x-y||^2 / tau), in contrast to the standard
    # k(x,y)=exp(-||x-y||^2 / (2*sigma^2)), so tau=2*sigma^2
    # (a numeric check of this relation follows after this example)
    kernel = GaussianKernel(10, 8)
    kernel.init(features, features)
    kmm = KernelMeanMatching(kernel, array([0, 1, 2, 3, 7, 8, 9], dtype=int32), array([4, 5, 6], dtype=int32))
    w = kmm.compute_weights()
    # print(w)
    return w
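The width convention mentioned in the comment above can be verified numerically. The following small check is not part of the original example and uses made-up data points; it compares an off-diagonal kernel entry against exp(-||x-y||^2 / width) with width = 8, i.e. sigma = 2:

import numpy as np
from shogun.Features import RealFeatures
from shogun.Kernel import GaussianKernel

x = np.array([[0.0, 1.0],
              [0.0, 2.0]])                     # two 2-d points, one per column
feats = RealFeatures(x)
kernel = GaussianKernel(feats, feats, 8.0)     # width = tau = 2*sigma^2 with sigma = 2
km = kernel.get_kernel_matrix()
expected = np.exp(-np.sum((x[:, 0] - x[:, 1]) ** 2) / 8.0)
print(km[0, 1], expected)                      # both should be exp(-5/8), roughly 0.535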
Example 4: regression_svrlight_modular
def regression_svrlight_modular(fm_train=traindat, fm_test=testdat, label_train=label_traindat, \
        width=1.2, C=1, epsilon=1e-5, tube_epsilon=1e-2, num_threads=3):
    from shogun.Features import Labels, RealFeatures
    from shogun.Kernel import GaussianKernel
    try:
        from shogun.Regression import SVRLight
    except ImportError:
        print('No support for SVRLight available.')
        return
    feats_train = RealFeatures(fm_train)
    feats_test = RealFeatures(fm_test)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train)
    svr = SVRLight(C, epsilon, kernel, labels)
    svr.set_tube_epsilon(tube_epsilon)
    svr.parallel.set_num_threads(num_threads)
    svr.train()
    kernel.init(feats_train, feats_test)
    out = svr.apply().get_labels()
    return out, kernel
Example 5: kernel_io_modular
def kernel_io_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.9):
    from shogun.Features import RealFeatures
    from shogun.Kernel import GaussianKernel
    from shogun.Library import AsciiFile, BinaryFile
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    km_train = kernel.get_kernel_matrix()
    f = AsciiFile("gaussian_train.ascii", "w")
    kernel.save(f)
    del f
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    f = AsciiFile("gaussian_test.ascii", "w")
    kernel.save(f)
    del f
    # clean up
    import os
    os.unlink("gaussian_test.ascii")
    os.unlink("gaussian_train.ascii")
    return km_train, km_test, kernel
Example 6: createKernel
def createKernel(self, feats_train):
    """Call the corresponding constructor for the kernel"""
    if self.kparam['name'] == 'gauss':
        kernel = GaussianKernel(feats_train, feats_train, self.kparam['width'])
    elif self.kparam['name'] == 'linear':
        kernel = LinearKernel(feats_train, feats_train, self.kparam['scale'])
    elif self.kparam['name'] == 'poly':
        kernel = PolyKernel(feats_train, feats_train, self.kparam['degree'],
                            self.kparam['inhomogene'], self.kparam['normal'])
    elif self.kparam['name'] == 'wd':
        kernel = WeightedDegreePositionStringKernel(feats_train, feats_train, self.kparam['degree'])
        kernel.set_shifts(self.kparam['shift'] * numpy.ones(self.kparam['seqlength'], dtype=numpy.int32))
    elif self.kparam['name'] == 'spec':
        kernel = CommWordStringKernel(feats_train, feats_train)
    elif self.kparam['name'] == 'localalign':
        kernel = LocalAlignmentStringKernel(feats_train, feats_train)
    elif self.kparam['name'] == 'localimprove':
        kernel = LocalityImprovedStringKernel(feats_train, feats_train, self.kparam['length'],
                                              self.kparam['indeg'], self.kparam['outdeg'])
    else:
        raise ValueError('Unknown kernel %s' % self.kparam['name'])
    self.kernel = kernel
    return kernel
Example 7: kernel_gaussian_modular
def kernel_gaussian_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.3):
    from shogun.Features import RealFeatures
    from shogun.Kernel import GaussianKernel
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    km_train = kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    return km_train, km_test, kernel
Example 8: create_param_tree
def create_param_tree():
    root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("C1")
    root.append_child(c1)
    c1.build_values(-1.0, 1.0, R_EXP)
    c2 = ModelSelectionParameters("C2")
    root.append_child(c2)
    c2.build_values(-1.0, 1.0, R_EXP)
    gaussian_kernel = GaussianKernel()
    # print all parameters available for model selection
    # don't worry if yours is not included, simply write to the mailing list
    gaussian_kernel.print_modsel_params()
    param_gaussian_kernel = ModelSelectionParameters("kernel", gaussian_kernel)
    gaussian_kernel_width = ModelSelectionParameters("width")
    gaussian_kernel_width.build_values(-1.0, 1.0, R_EXP, 1.0, 2.0)
    param_gaussian_kernel.append_child(gaussian_kernel_width)
    root.append_child(param_gaussian_kernel)
    power_kernel = PowerKernel()
    # print all parameters available for model selection
    # don't worry if yours is not included, simply write to the mailing list
    power_kernel.print_modsel_params()
    param_power_kernel = ModelSelectionParameters("kernel", power_kernel)
    root.append_child(param_power_kernel)
    param_power_kernel_degree = ModelSelectionParameters("degree")
    param_power_kernel_degree.build_values(1.0, 2.0, R_LINEAR)
    param_power_kernel.append_child(param_power_kernel_degree)
    metric = MinkowskiMetric(10)
    # print all parameters available for model selection
    # don't worry if yours is not included, simply write to the mailing list
    metric.print_modsel_params()
    param_power_kernel_metric1 = ModelSelectionParameters("distance", metric)
    param_power_kernel.append_child(param_power_kernel_metric1)
    param_power_kernel_metric1_k = ModelSelectionParameters("k")
    param_power_kernel_metric1_k.build_values(1.0, 2.0, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)
    return root
Example 9: gaussian
def gaussian():
    print('Gaussian')
    from shogun.Features import RealFeatures
    from shogun.Kernel import GaussianKernel
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    width = 1.9
    kernel = GaussianKernel(feats_train, feats_train, width)
    km_train = kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
Example 10: classifier_libsvm_minimal_modular
def classifier_libsvm_minimal_modular(fm_train_real=traindat, fm_test_real=testdat, label_train_twoclass=label_traindat, width=2.1, C=1):
    from shogun.Features import RealFeatures, BinaryLabels
    from shogun.Classifier import LibSVM
    from shogun.Kernel import GaussianKernel
    from numpy import mean, sign
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = BinaryLabels(label_train_twoclass)
    svm = LibSVM(C, kernel, labels)
    svm.train()
    kernel.init(feats_train, feats_test)
    out = svm.apply().get_labels()
    testerr = mean(sign(out) != label_train_twoclass)
Example 11: create_param_tree
def create_param_tree():
    from shogun.ModelSelection import ModelSelectionParameters, R_EXP, R_LINEAR
    from shogun.ModelSelection import ParameterCombination
    from shogun.Kernel import GaussianKernel, PolyKernel
    root = ModelSelectionParameters()
    tau = ModelSelectionParameters("tau")
    root.append_child(tau)
    # also R_LINEAR/R_LOG is available as type
    min = -1
    max = 1
    type = R_EXP
    step = 1.5
    base = 2
    tau.build_values(min, max, type, step, base)
    # gaussian kernel with width
    gaussian_kernel = GaussianKernel()
    # print all parameters available for model selection
    # don't worry if yours is not included, simply write to the mailing list
    gaussian_kernel.print_modsel_params()
    param_gaussian_kernel = ModelSelectionParameters("kernel", gaussian_kernel)
    gaussian_kernel_width = ModelSelectionParameters("width")
    gaussian_kernel_width.build_values(5.0, 8.0, R_EXP, 1.0, 2.0)
    param_gaussian_kernel.append_child(gaussian_kernel_width)
    root.append_child(param_gaussian_kernel)
    # polynomial kernel with degree
    poly_kernel = PolyKernel()
    # print all parameters available for model selection
    # don't worry if yours is not included, simply write to the mailing list
    poly_kernel.print_modsel_params()
    param_poly_kernel = ModelSelectionParameters("kernel", poly_kernel)
    root.append_child(param_poly_kernel)
    # note that integers are used here
    param_poly_kernel_degree = ModelSelectionParameters("degree")
    param_poly_kernel_degree.build_values(1, 2, R_LINEAR)
    param_poly_kernel.append_child(param_poly_kernel_degree)
    return root
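For context, a parameter tree like the one returned here is normally handed to a grid-search model-selection object together with a cross-validation setup, which then searches over tau and the kernel parameters. The sketch below is an assumption based on SHOGUN's modular model-selection API of the same era (GridSearchModelSelection, CrossValidation, CrossValidationSplitting, MeanSquaredError) and uses placeholder data; it is not taken from the original example, and constructor argument order may differ between releases:

import numpy as np
from shogun.Features import RealFeatures, Labels
from shogun.Kernel import GaussianKernel
from shogun.Regression import KernelRidgeRegression
from shogun.Evaluation import CrossValidation, CrossValidationSplitting, MeanSquaredError
from shogun.ModelSelection import GridSearchModelSelection

traindat = np.random.randn(2, 20)                  # placeholder training data
label_traindat = np.random.randn(20)               # placeholder regression targets
feats_train = RealFeatures(traindat)
labels = Labels(label_traindat)
krr = KernelRidgeRegression(1e-6, GaussianKernel(), labels)   # tau and kernel are overwritten by the search
splitting = CrossValidationSplitting(labels, 5)    # 5-fold splitting
evaluation = MeanSquaredError()
cross_validation = CrossValidation(krr, feats_train, labels, splitting, evaluation)
param_tree = create_param_tree()                   # the tree built above
model_selection = GridSearchModelSelection(param_tree, cross_validation)   # assumed argument order
best_parameters = model_selection.select_model()
best_parameters.apply_to_machine(krr)
krr.train(feats_train)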
Example 12: regression_kernel_ridge_modular
def regression_kernel_ridge_modular(fm_train=traindat, fm_test=testdat, label_train=label_traindat, width=0.8, tau=1e-6):
    from shogun.Features import Labels, RealFeatures
    from shogun.Kernel import GaussianKernel
    from shogun.Regression import KernelRidgeRegression
    feats_train = RealFeatures(fm_train)
    feats_test = RealFeatures(fm_test)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train)
    krr = KernelRidgeRegression(tau, kernel, labels)
    krr.train(feats_train)
    kernel.init(feats_train, feats_test)
    out = krr.apply().get_labels()
    return out, kernel, krr
Example 13: classifier_multiclassmachine_modular
def classifier_multiclassmachine_modular(fm_train_real=traindat, fm_test_real=testdat, label_train_multiclass=label_traindat, width=2.1, C=1, epsilon=1e-5):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM, KernelMulticlassMachine, ONE_VS_REST_STRATEGY
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train_multiclass)
    classifier = LibSVM(C, kernel, labels)
    classifier.set_epsilon(epsilon)
    mc_classifier = KernelMulticlassMachine(ONE_VS_REST_STRATEGY, kernel, classifier, labels)
    mc_classifier.train()
    kernel.init(feats_train, feats_test)
    out = mc_classifier.apply().get_labels()
    return out
Example 14: classifier_gmnpsvm_modular
def classifier_gmnpsvm_modular(fm_train_real=traindat, fm_test_real=testdat, label_train_multiclass=label_traindat, width=2.1, C=1, epsilon=1e-5):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import GMNPSVM
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = MulticlassLabels(label_train_multiclass)
    svm = GMNPSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train(feats_train)
    kernel.init(feats_train, feats_test)
    out = svm.apply(feats_test).get_labels()
    return out, kernel
Example 15: classifier_multiclasslibsvm_modular
def classifier_multiclasslibsvm_modular(fm_train_real=traindat, fm_test_real=testdat, label_train_multiclass=label_traindat, width=2.1, C=1, epsilon=1e-5):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import MulticlassLibSVM
    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)
    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train_multiclass)
    svm = MulticlassLibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()
    kernel.init(feats_train, feats_test)
    predictions = svm.apply()
    return predictions, svm, predictions.get_labels()