This article collects typical usage examples of the shogun.Classifier.LibSVM class in Python. If you are wondering how the LibSVM class is used in Python, or what it can be used for, the hand-picked class examples below may help.
The following presents 15 code examples of the LibSVM class, sorted by popularity by default.
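All of the examples follow the same basic pattern: wrap the input matrix in a feature object, build a kernel on it, wrap the targets in a label object, construct LibSVM(C, kernel, labels), call train(), and apply() the trained machine. The minimal sketch below summarizes that pattern on random toy data; it uses the same legacy shogun modular imports as the examples that follow (in this era of shogun the label class is either Labels or BinaryLabels, depending on the version), so treat the module paths and the toy data as illustrative rather than canonical.

from numpy import concatenate, ones
from numpy.random import randn
from shogun.Features import RealFeatures, BinaryLabels
from shogun.Kernel import GaussianKernel
from shogun.Classifier import LibSVM

# two Gaussian blobs as toy training data (one example per column)
X = concatenate((randn(2, 10) - 1, randn(2, 10) + 1), axis=1)
y = concatenate((-ones(10), ones(10)))

feats = RealFeatures(X)                      # wrap the matrix as shogun features
kernel = GaussianKernel(feats, feats, 2.1)   # Gaussian kernel with a fixed width
labels = BinaryLabels(y)                     # two-class labels in {-1, +1}

svm = LibSVM(1.0, kernel, labels)            # C, kernel, labels
svm.train()
predictions = svm.apply(feats).get_labels()  # apply to (here: the training) features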
Example 1: training_run
def training_run(options):
    """Conduct a training run and return a trained SVM kernel"""
    settings = MotifFinderSettings(kirmes_ini.MOTIF_LENGTH, options.window_width, options.replace)
    positives = MotifFinder(finder_settings=settings)
    positives.setFastaFile(options.positives)
    positives.setMotifs(options.pgff)
    pmotifs, ppositions = positives.getResults()
    negatives = MotifFinder(finder_settings=settings)
    negatives.setFastaFile(options.negatives)
    negatives.setMotifs(options.ngff)
    nmotifs, npositions = negatives.getResults()

    wds_kparams = kirmes_ini.WDS_KERNEL_PARAMETERS
    wds_svm = EasySVM.EasySVM(wds_kparams)
    num_positives = len(pmotifs.values()[0])
    num_negatives = len(nmotifs.values()[0])

    # Creating Kernel Objects
    kernel = CombinedKernel()
    features = CombinedFeatures()
    kernel_array = []
    motifs = pmotifs.keys()
    motifs.sort()

    # Adding Kmer Kernels
    for motif in motifs:
        all_examples = pmotifs[motif] + nmotifs[motif]
        motif_features = wds_svm.createFeatures(all_examples)
        wds_kernel = WeightedDegreePositionStringKernel(motif_features, motif_features, wds_kparams["degree"])
        wds_kernel.set_shifts(wds_kparams["shift"] * ones(wds_kparams["seqlength"], dtype=int32))
        features.append_feature_obj(motif_features)
        kernel_array.append(wds_kernel)
        kernel.append_kernel(wds_kernel)

    rbf_svm = EasySVM.EasySVM(kirmes_ini.RBF_KERNEL_PARAMETERS)
    positions = array(ppositions + npositions, dtype=float64).T
    position_features = rbf_svm.createFeatures(positions)
    features.append_feature_obj(position_features)
    motif_labels = append(ones(num_positives), -ones(num_negatives))
    complete_labels = Labels(motif_labels)
    rbf_kernel = GaussianKernel(position_features, position_features, kirmes_ini.RBF_KERNEL_PARAMETERS["width"])
    kernel_array.append(rbf_kernel)
    kernel.append_kernel(rbf_kernel)

    # Kernel init
    kernel.init(features, features)
    kernel.set_cache_size(kirmes_ini.K_CACHE_SIZE)
    svm = LibSVM(kirmes_ini.K_COMBINED_C, kernel, complete_labels)
    svm.parallel.set_num_threads(kirmes_ini.K_NUM_THREADS)

    # Training
    svm.train()
    if not os.path.exists(options.output_path):
        os.mkdir(options.output_path)
    html = {}
    if options.contrib:
        html["contrib"] = contrib(svm, kernel, motif_labels, kernel_array, motifs)
    if options.logos:
        html["poims"] = poims(svm, kernel, kernel_array, motifs, options.output_path)
    if options.query:
        html["query"] = evaluate(options, svm, kernel, features, motifs)
    htmlize(html, options.output_html)
Example 2: modelselection_grid_search_kernel
def modelselection_grid_search_kernel():
    num_subsets=3
    num_vectors=20
    dim_vectors=3

    # create some (non-sense) data
    matrix=rand(dim_vectors, num_vectors)

    # create num_vectors vectors of dimension dim_vectors
    features=RealFeatures()
    features.set_feature_matrix(matrix)

    # create labels, two classes
    labels=BinaryLabels(num_vectors)
    for i in range(num_vectors):
        labels.set_label(i, 1 if i%2==0 else -1)

    # create svm
    classifier=LibSVM()

    # splitting strategy
    splitting_strategy=StratifiedCrossValidationSplitting(labels, num_subsets)

    # accuracy evaluation
    evaluation_criterion=ContingencyTableEvaluation(ACCURACY)

    # cross-validation class for evaluation in model selection
    cross=CrossValidation(classifier, features, labels, splitting_strategy, evaluation_criterion)
    cross.set_num_runs(1)

    # print all parameters available for model selection
    # (don't worry if yours is not included, simply write to the mailing list)
    classifier.print_modsel_params()

    # model parameter selection
    param_tree=create_param_tree()
    param_tree.print_tree()

    grid_search=GridSearchModelSelection(param_tree, cross)

    print_state=True
    best_combination=grid_search.select_model(print_state)
    print("best parameter(s):")
    best_combination.print_tree()
    best_combination.apply_to_machine(classifier)

    # larger number of runs gives tighter confidence intervals
    cross.set_num_runs(10)
    cross.set_conf_int_alpha(0.01)
    result=cross.evaluate()
    print("result: ")
    result.print_result()

    return 0
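Example 2 calls a create_param_tree() helper that is not included in the snippet. A plausible sketch of such a helper, modeled on shogun's legacy model-selection examples from the same era, is shown below; the module path shogun.ModelSelection, the parameter names ("C1", "C2", "width") and the search ranges are assumptions, not part of the original code.

from shogun.Kernel import GaussianKernel
from shogun.ModelSelection import ModelSelectionParameters, R_EXP

def create_param_tree():
    # root node of the parameter tree searched by GridSearchModelSelection
    root = ModelSelectionParameters()

    # regularization constants C1/C2, searched on an exponential grid
    c1 = ModelSelectionParameters("C1")
    c1.build_values(-1.0, 1.0, R_EXP)
    root.append_child(c1)

    c2 = ModelSelectionParameters("C2")
    c2.build_values(-1.0, 1.0, R_EXP)
    root.append_child(c2)

    # a Gaussian kernel whose width is searched as well
    kernel = GaussianKernel()
    param_kernel = ModelSelectionParameters("kernel", kernel)
    width = ModelSelectionParameters("width")
    width.build_values(-1.0, 1.0, R_EXP)
    param_kernel.append_child(width)
    root.append_child(param_kernel)

    return root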
Example 3: svm_train
def svm_train(kernel, labels, C1, C2=None):
    """Trains an SVM with the given kernel"""
    num_threads = 1
    kernel.io.disable_progress()
    svm = LibSVM(C1, kernel, labels)
    if C2:
        svm.set_C(C1, C2)
    svm.parallel.set_num_threads(num_threads)
    svm.io.disable_progress()
    svm.train()
    return svm
Example 4: init_svm
def init_svm(task_type, kernel, labels):
    """A factory for creating the right SVM type"""
    C=1
    epsilon=1e-5
    if task_type == 'Binary Classification':
        svm = LibSVM(C, kernel, labels)
    elif task_type == 'Multi Class Classification':
        svm = LibSVMMultiClass(C, kernel, labels)
    elif task_type == 'Regression':
        tube_epsilon=1e-2
        svm=LibSVR(C, epsilon, kernel, labels)
        svm.set_tube_epsilon(tube_epsilon)
    else:
        # without this, returning svm below would raise an UnboundLocalError
        raise ValueError(task_type + ' unknown!')
    return svm
Example 5: classifier_libsvm_minimal_modular
def classifier_libsvm_minimal_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_twoclass=label_traindat,width=2.1,C=1):
    from numpy import mean, sign
    from shogun.Features import RealFeatures, BinaryLabels
    from shogun.Classifier import LibSVM
    from shogun.Kernel import GaussianKernel

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    kernel=GaussianKernel(feats_train, feats_train, width)

    labels=BinaryLabels(label_train_twoclass)
    svm=LibSVM(C, kernel, labels)
    svm.train()

    kernel.init(feats_train, feats_test)
    out=svm.apply().get_labels()
    testerr=mean(sign(out)!=label_train_twoclass)
Example 6: bench_shogun
def bench_shogun(X, y, T, valid):
    #
    # .. Shogun ..
    #
    from datetime import datetime
    import numpy as np
    from shogun.Classifier import LibSVM
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel

    start = datetime.now()
    feat = RealFeatures(X.T)
    feat_test = RealFeatures(T.T)
    labels = Labels(y.astype(np.float64))
    # sigma is a module-level constant of the benchmark script this was taken from
    kernel = GaussianKernel(feat, feat, sigma)
    shogun_svm = LibSVM(1., kernel, labels)
    shogun_svm.train()
    dec_func = shogun_svm.classify(feat_test).get_labels()
    score = np.mean(np.sign(dec_func) == valid)
    return score, datetime.now() - start
Example 7: libsvm
def libsvm ():
    print 'LibSVM'
    from numpy import mean, sign
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Evaluation import PerformanceMeasures
    from shogun.Classifier import LibSVM

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    width=2.1
    kernel=GaussianKernel(feats_train, feats_train, width)

    C=1
    epsilon=1e-5
    labels=Labels(label_train_twoclass)

    svm=LibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()

    #kernel.init(feats_train, feats_test)
    output = svm.classify(feats_test)#.get_labels()
    #output_vector = output.get_labels()
    out=svm.classify().get_labels()
    testerr=mean(sign(out)!=testlab)
    print testerr
Example 8: classifier_multiclassmachine_modular
def classifier_multiclassmachine_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,width=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM, KernelMulticlassMachine, ONE_VS_REST_STRATEGY

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    kernel=GaussianKernel(feats_train, feats_train, width)

    labels=Labels(label_train_multiclass)

    classifier = LibSVM(C, kernel, labels)
    classifier.set_epsilon(epsilon)

    mc_classifier = KernelMulticlassMachine(ONE_VS_REST_STRATEGY, kernel, classifier, labels)
    mc_classifier.train()

    kernel.init(feats_train, feats_test)
    out = mc_classifier.apply().get_labels()
    return out
Example 9: classifier_multiclassmachine_modular
def classifier_multiclassmachine_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,width=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, MulticlassLabels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM, KernelMulticlassMachine, MulticlassOneVsRestStrategy

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    kernel=GaussianKernel(feats_train, feats_train, width)

    labels=MulticlassLabels(label_train_multiclass)

    classifier = LibSVM()
    classifier.set_epsilon(epsilon)
    #print labels.get_labels()

    mc_classifier = KernelMulticlassMachine(MulticlassOneVsRestStrategy(), kernel, classifier, labels)
    mc_classifier.train()

    kernel.init(feats_train, feats_test)
    out = mc_classifier.apply().get_labels()
    return out
Example 10: train
def train(self, trainexamples, trainlabels):
    """Trains an SVM with the given kernel"""
    kernel_cache_size = 500
    num_threads = 6

    feats_train = self.createFeatures(trainexamples)
    if self.kparam['name'] == 'wd':
        self.kparam['seqlength'] = len(trainexamples[0])
    self.createKernel(feats_train)

    self.kernel.io.disable_progress()
    self.kernel.set_cache_size(int(kernel_cache_size))

    labels = Labels(numpy.array(trainlabels, numpy.double))

    svm = LibSVM(self.getC(), self.kernel, labels)
    svm.parallel.set_num_threads(num_threads)
    svm.io.disable_progress()
    svm.train()

    return (svm, feats_train)
Example 11: classifier_custom_kernel_modular
def classifier_custom_kernel_modular (C=1,dim=7):
    from shogun.Features import RealFeatures, BinaryLabels
    from shogun.Kernel import CustomKernel
    from shogun.Classifier import LibSVM
    from numpy import diag, ones, sign
    from numpy.random import rand, seed

    seed((C,dim))
    lab=sign(2*rand(dim) - 1)
    data=rand(dim, dim)
    symdata=data*data.T + diag(ones(dim))

    kernel=CustomKernel()
    kernel.set_full_kernel_matrix_from_full(data)
    labels=BinaryLabels(lab)
    svm=LibSVM(C, kernel, labels)
    svm.train()
    predictions = svm.apply()
    out=svm.apply().get_labels()
    return svm, out
Example 12: loadSVM
def loadSVM(pickled_svm_filename, C, labels):
    """Loads a Shogun SVM object which was pickled by saveSVM"""
    from cPickle import Unpickler, PickleError
    from shogun.Kernel import CombinedKernel

    pickle_file = open(pickled_svm_filename, 'rb')
    unpck = Unpickler(pickle_file)
    (version, num_sv, name, bias, alphas, svs) = unpck.load()
    if version == __version__:
        svm = LibSVM(num_sv)  # same as .create_new_model(num_sv)
        svm.set_bias(bias)
        svm.set_alphas(alphas)
        svm.set_support_vectors(svs)
        kernel = CombinedKernel()  # not sure if this is even required
        kernel.set_name(name)      # maybe not required
        svm.set_kernel(kernel)
    else:
        print "File was pickled by another version of EasySVM.py or is not a kernel:"
        print "Received from ", pickled_svm_filename, ": ", version, " expected: ", __version__
        raise PickleError
    return svm
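loadSVM() above unpickles a 6-tuple written by a saveSVM() counterpart that is not shown in this example. A rough sketch of what that counterpart could look like, derived only from the fields loadSVM() expects (the actual saveSVM() in EasySVM.py may differ):

def saveSVM(pickled_svm_filename, svm, kernel):
    """Pickles the parts of a trained Shogun SVM that loadSVM expects"""
    from cPickle import Pickler
    pickle_file = open(pickled_svm_filename, 'wb')
    pck = Pickler(pickle_file)
    # the same 6-tuple that loadSVM unpickles
    pck.dump((__version__,
              svm.get_num_support_vectors(),
              kernel.get_name(),
              svm.get_bias(),
              svm.get_alphas(),
              svm.get_support_vectors()))
    pickle_file.close()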
Example 13: classifier_libsvm_modular
def classifier_libsvm_modular (fm_train_real=traindat,fm_test_real=testdat,label_train_twoclass=label_traindat,width=2.1,C=1,epsilon=1e-5):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    kernel=GaussianKernel(feats_train, feats_train, width)
    labels=Labels(label_train_twoclass)

    svm=LibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()

    kernel.init(feats_train, feats_test)
    labels = svm.apply().get_labels()
    supportvectors = sv_idx = svm.get_support_vectors()
    alphas=svm.get_alphas()
    predictions = svm.apply()
    return predictions, svm, predictions.get_labels()
Example 14: libsvm
def libsvm ():
    print 'LibSVM'
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    width=2.1
    kernel=GaussianKernel(feats_train, feats_train, width)

    C=1
    epsilon=1e-5
    labels=Labels(label_train_twoclass)

    svm=LibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()

    kernel.init(feats_train, feats_test)
    svm.classify().get_labels()
    sv_idx=svm.get_support_vectors()
    alphas=svm.get_alphas()
Example 15: kernel_combined_custom_poly_modular
def kernel_combined_custom_poly_modular(fm_train_real = traindat,fm_test_real = testdat,fm_label_twoclass=label_traindat):
    from shogun.Features import CombinedFeatures, RealFeatures, BinaryLabels
    from shogun.Kernel import CombinedKernel, PolyKernel, CustomKernel
    from shogun.Classifier import LibSVM

    # training: a combined kernel made of a precomputed (custom) poly kernel
    # and a regular poly sub-kernel
    kernel = CombinedKernel()
    feats_train = CombinedFeatures()

    tfeats = RealFeatures(fm_train_real)
    tkernel = PolyKernel(10, 3)
    tkernel.init(tfeats, tfeats)
    K = tkernel.get_kernel_matrix()
    kernel.append_kernel(CustomKernel(K))

    subkfeats_train = RealFeatures(fm_train_real)
    feats_train.append_feature_obj(subkfeats_train)
    subkernel = PolyKernel(10, 2)
    kernel.append_kernel(subkernel)

    kernel.init(feats_train, feats_train)

    labels = BinaryLabels(fm_label_twoclass)
    svm = LibSVM(1.0, kernel, labels)
    svm.train()

    # prediction: rebuild the combined kernel between training and test data
    kernel = CombinedKernel()
    feats_pred = CombinedFeatures()

    pfeats = RealFeatures(fm_test_real)
    tkernel = PolyKernel(10, 3)
    tkernel.init(tfeats, pfeats)
    K = tkernel.get_kernel_matrix()
    kernel.append_kernel(CustomKernel(K))

    subkfeats_test = RealFeatures(fm_test_real)
    feats_pred.append_feature_obj(subkfeats_test)
    subkernel = PolyKernel(10, 2)
    kernel.append_kernel(subkernel)

    kernel.init(feats_train, feats_pred)

    svm.set_kernel(kernel)
    svm.apply()
    km_train=kernel.get_kernel_matrix()
    return km_train, kernel