This article collects typical usage examples of the Python class shogun.Features.RealFeatures, gathered from open-source projects. If you have been wondering what RealFeatures does, how to use it, or want to see it in real code, the curated class examples below should help.
The following 15 code examples of the RealFeatures class are listed, sorted by popularity by default. You can vote for the examples you like or find useful; your feedback helps surface better Python code examples.
Example 1: createFeatures
def createFeatures(self, examples):
"""Converts numpy arrays or sequences into shogun features"""
if self.kparam['name'] == 'gauss' or self.kparam['name'] == 'linear' or self.kparam['name'] == 'poly':
examples = numpy.array(examples)
feats = RealFeatures(examples)
elif self.kparam['name'] == 'wd' or self.kparam['name'] == 'localalign' or self.kparam['name'] == 'localimprove':
#examples = non_atcg_convert(examples, nuc_con)
feats = StringCharFeatures(examples, DNA)
elif self.kparam['name'] == 'spec':
#examples = non_atcg_convert(examples, nuc_con)
feats = StringCharFeatures(examples, DNA)
wf = StringUlongFeatures( feats.get_alphabet() )
        wf.obtain_from_char(feats, self.kparam['degree']-1, self.kparam['degree'], 0, self.kparam['name']=='cumspec')
del feats
        if train_mode:  # train_mode is assumed to be set in the surrounding scope of the original project
preproc = SortUlongString()
preproc.init(wf)
wf.add_preproc(preproc)
ret = wf.apply_preproc()
feats = wf
else:
        raise ValueError('Unknown kernel %s' % self.kparam['name'])
return feats
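For reference, here is a minimal self-contained sketch of the RealFeatures path taken by the 'gauss'/'linear'/'poly' branch above (assuming the legacy shogun.Features modular bindings; the sample matrix is made up):

import numpy
from shogun.Features import RealFeatures

# RealFeatures expects a 2-D float64 matrix with one example per column
examples = numpy.array([[1.0, 2.0, 3.0],
                        [4.0, 5.0, 6.0]])
feats = RealFeatures(examples)
print(feats.get_num_features())   # 2 dimensions (rows)
print(feats.get_num_vectors())    # 3 examples (columns)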
Example 2: prune_var_sub_mean
def prune_var_sub_mean ():
    print('PruneVarSubMean')
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.PreProc import PruneVarSubMean
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
preproc=PruneVarSubMean()
preproc.init(feats_train)
feats_train.add_preproc(preproc)
feats_train.apply_preproc()
feats_test.add_preproc(preproc)
feats_test.apply_preproc()
width=1.4
size_cache=10
kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
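The snippet above relies on module-level fm_train_real and fm_test_real matrices defined elsewhere in the original file. A sketch of plausible stand-in data (random non-negative float64 matrices, one example per column, which suits the Chi2Kernel):

from numpy.random import rand

fm_train_real = rand(5, 11)   # 5 dimensions, 11 training examples (made-up sizes)
fm_test_real  = rand(5, 17)   # 5 dimensions, 17 test examples
prune_var_sub_mean()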
Example 3: features_simple_modular
def features_simple_modular(A=matrixA,B=matrixB,C=matrixC):
a=RealFeatures(A)
b=LongIntFeatures(B)
c=ByteFeatures(C)
# or 16bit wide ...
#feat1 = f.ShortFeatures(N.zeros((10,5),N.short))
#feat2 = f.WordFeatures(N.zeros((10,5),N.uint16))
# print some statistics about a
# get first feature vector and set it
a.set_feature_vector(array([1,4,0,0,0,9], dtype=float64), 0)
# get matrices
a_out = a.get_feature_matrix()
b_out = b.get_feature_matrix()
c_out = c.get_feature_matrix()
assert(all(a_out==A))
assert(all(b_out==B))
assert(all(c_out==C))
return a_out,b_out,c_out,a,b,c
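The default matrices matrixA, matrixB and matrixC are defined elsewhere in the original example. A hypothetical set that matches the dtypes expected by RealFeatures, LongIntFeatures and ByteFeatures, and whose first column already agrees with the set_feature_vector call above so the assertions still hold:

from numpy import array, int64, uint8, float64

matrixA = array([[1, 7, 2],
                 [4, 0, 1],
                 [0, 3, 3],
                 [0, 0, 8],
                 [0, 2, 0],
                 [9, 1, 5]], dtype=float64)   # 6 features x 3 vectors; column 0 matches [1,4,0,0,0,9]
matrixB = array([[1, 2, 3],
                 [4, 5, 6]], dtype=int64)     # LongIntFeatures expects int64
matrixC = array([[1, 2, 255],
                 [0, 17, 42]], dtype=uint8)   # ByteFeatures expects uint8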
Example 4: norm_one
def norm_one ():
    print('NormOne')
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.PreProc import NormOne
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
preproc=NormOne()
preproc.init(feats_train)
feats_train.add_preproc(preproc)
feats_train.apply_preproc()
feats_test.add_preproc(preproc)
feats_test.apply_preproc()
width=1.4
size_cache=10
kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
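If you want to confirm what NormOne did, a hedged check that could be added inside norm_one() right after the feats_train.apply_preproc() call (get_feature_matrix is part of the same legacy API):

    import numpy
    norms = numpy.apply_along_axis(numpy.linalg.norm, 0, feats_train.get_feature_matrix())
    print(norms)   # expected to be (approximately) all ones after NormOne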
Example 5: modelselection_grid_search_kernel
def modelselection_grid_search_kernel():
num_subsets=3
num_vectors=20
dim_vectors=3
    # create some (nonsense) random data; rand comes from numpy.random
    matrix=rand(dim_vectors, num_vectors)
    # create num_vectors random vectors of dimension dim_vectors
features=RealFeatures()
features.set_feature_matrix(matrix)
# create labels, two classes
labels=BinaryLabels(num_vectors)
for i in range(num_vectors):
labels.set_label(i, 1 if i%2==0 else -1)
# create svm
classifier=LibSVM()
# splitting strategy
splitting_strategy=StratifiedCrossValidationSplitting(labels, num_subsets)
# accuracy evaluation
evaluation_criterion=ContingencyTableEvaluation(ACCURACY)
# cross validation class for evaluation in model selection
cross=CrossValidation(classifier, features, labels, splitting_strategy, evaluation_criterion)
cross.set_num_runs(1)
# print all parameter available for modelselection
    # Don't worry if yours is not included, simply write to the mailing list
classifier.print_modsel_params()
# model parameter selection
param_tree=create_param_tree()
param_tree.print_tree()
grid_search=GridSearchModelSelection(param_tree, cross)
print_state=True
best_combination=grid_search.select_model(print_state)
print("best parameter(s):")
best_combination.print_tree()
best_combination.apply_to_machine(classifier)
# larger number of runs to have tighter confidence intervals
cross.set_num_runs(10)
cross.set_conf_int_alpha(0.01)
result=cross.evaluate()
print("result: ")
result.print_result()
return 0
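create_param_tree() is defined elsewhere in the original example file, as are imports such as RealFeatures, BinaryLabels, LibSVM, StratifiedCrossValidationSplitting, ContingencyTableEvaluation, ACCURACY, CrossValidation, GridSearchModelSelection and numpy.random.rand. A minimal sketch of what create_param_tree might look like, modeled on the classic shogun grid-search examples; the module paths follow the legacy shogun.* layout used above, and the parameter names ('C1', 'C2', 'kernel', 'width') and R_EXP ranges are assumptions that may need adjusting for your shogun version:

from shogun.ModelSelection import ModelSelectionParameters, R_EXP
from shogun.Kernel import GaussianKernel

def create_param_tree():
    root = ModelSelectionParameters()

    c1 = ModelSelectionParameters('C1')
    root.append_child(c1)
    c1.build_values(-2.0, 2.0, R_EXP)   # exponential grid for the C1 regularisation parameter

    c2 = ModelSelectionParameters('C2')
    root.append_child(c2)
    c2.build_values(-2.0, 2.0, R_EXP)

    gaussian_kernel = GaussianKernel()
    param_gaussian_kernel = ModelSelectionParameters('kernel', gaussian_kernel)
    root.append_child(param_gaussian_kernel)

    param_width = ModelSelectionParameters('width')
    param_width.build_values(-1.0, 1.0, R_EXP)
    param_gaussian_kernel.append_child(param_width)

    return root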
Example 6: features_dense_zero_copy_modular
def features_dense_zero_copy_modular (in_data=data):
feats = None
    if numpy.__version__ >= '1.5':  # note: lexicographic string comparison, kept as in the original example
feats=numpy.array(in_data, dtype=float64, order='F')
a=RealFeatures()
a.frombuffer(feats, False)
b=numpy.array(a, copy=False)
c=numpy.array(a, copy=True)
d=RealFeatures()
d.frombuffer(a, False)
e=RealFeatures()
e.frombuffer(a, True)
a[:,0]=0
        print(a[0:4])
        print(b[0:4])
        print(c[0:4])
        print(d[0:4])
        print(e[0:4])
else:
print "numpy version >= 1.5 is needed"
return feats
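The point of this example is that b (created with copy=False) shares memory with a, while c is an independent copy; the a[:,0]=0 assignment is therefore visible through b but not through c. A hedged check that could be placed right after the a[:,0]=0 line above, assuming in_data has non-zero entries in its first column:

        import numpy
        assert numpy.all(b[:, 0] == 0)        # b shares memory with a, so it sees the change
        assert not numpy.all(c[:, 0] == 0)    # c was copied before the change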
Example 7: distance_mahalanobis_modular
def distance_mahalanobis_modular (fm_train_real = traindat, fm_test_real = testdat):
from shogun.Features import RealFeatures
from shogun.Distance import MahalanobisDistance
feats_train = RealFeatures(fm_train_real)
feats_test = RealFeatures(fm_test_real)
distance = MahalanobisDistance(feats_test, feats_train)
for i in range(feats_test.get_num_vectors()):
for j in range(feats_train.get_num_vectors()):
dm = distance.distance(i, j)
            print(dm)
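Instead of the double loop, all pairwise distances can be fetched in one call; a short sketch using get_distance_matrix from the same legacy Distance API, to be placed after the loops inside the function:

    # equivalent to the nested loops above, pulled out as a single numpy matrix
    dm_full = distance.get_distance_matrix()
    print(dm_full)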
Example 8: prepare_feats
def prepare_feats(desc, l=2, as_shogun=False):
    if l==2: desc = np.sqrt(desc)  # bias not affected by sqrt
norms = np.apply_along_axis(np.linalg.norm, 0, desc[:-1,:], l) #leave bias alone
np.seterr(divide='ignore', invalid='ignore')
desc[:-1,:]=desc[:-1,:]/norms #leave bias alone
np.seterr(divide='warn', invalid='warn')
if l==1: desc=desc[:-1,:] #removing bias dim if L1 -> nonlinear TODO find better way...
desc[np.isnan(desc)]=0 #handle NaNs
if as_shogun:
desc=RealFeatures(desc.astype('float'))
return desc
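A hedged usage sketch for prepare_feats, with a made-up descriptor matrix whose last row is the bias dimension (the convention the function assumes); the function itself additionally needs import numpy as np and from shogun.Features import RealFeatures at module level:

import numpy as np

desc = np.abs(np.random.randn(11, 100))    # 10 descriptor dims + 1 bias row, 100 samples (made up)
desc[-1, :] = 1.0                          # bias row, left untouched by the normalisation
l2_feats = prepare_feats(desc.copy(), l=2, as_shogun=True)    # RealFeatures, L2-style normalisation
l1_feats = prepare_feats(desc.copy(), l=1, as_shogun=False)   # plain numpy array, bias row dropped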
Example 9: features_dense_real_modular
def features_dense_real_modular(A=matrix):
    # dense features of type Real
a = RealFeatures(A)
# print(some statistics about a)
# print(a.get_num_vectors())
# print(a.get_num_features())
# get first feature vector and set it
# print(a.get_feature_vector(0))
a.set_feature_vector(array([1, 4, 0, 0, 0, 9], dtype=float64), 0)
# get matrix
a_out = a.get_feature_matrix()
assert all(a_out == A)
return a_out
Example 10: features_director_dot_modular
def features_director_dot_modular (fm_train_real, fm_test_real,
label_train_twoclass, C, epsilon):
from shogun.Features import RealFeatures, SparseRealFeatures, BinaryLabels
from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC_DUAL
from shogun.Mathematics import Math_init_random
Math_init_random(17)
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
labels=BinaryLabels(label_train_twoclass)
    dfeats_train=NumpyFeatures(fm_train_real)  # NumpyFeatures: custom director-based feature class defined earlier in the original example (not shown here)
dfeats_test=NumpyFeatures(fm_test_real)
dlabels=BinaryLabels(label_train_twoclass)
    print(feats_train.get_computed_dot_feature_matrix())
    print(dfeats_train.get_computed_dot_feature_matrix())
svm=LibLinear(C, feats_train, labels)
svm.set_liblinear_solver_type(L2R_L2LOSS_SVC_DUAL)
svm.set_epsilon(epsilon)
svm.set_bias_enabled(True)
svm.train()
svm.set_features(feats_test)
svm.apply().get_labels()
predictions = svm.apply()
dfeats_train.__disown__()
dfeats_train.parallel.set_num_threads(1)
dsvm=LibLinear(C, dfeats_train, dlabels)
dsvm.set_liblinear_solver_type(L2R_L2LOSS_SVC_DUAL)
dsvm.set_epsilon(epsilon)
dsvm.set_bias_enabled(True)
dsvm.train()
dfeats_test.__disown__()
dfeats_test.parallel.set_num_threads(1)
dsvm.set_features(dfeats_test)
dsvm.apply().get_labels()
dpredictions = dsvm.apply()
return predictions, svm, predictions.get_labels()
Example 11: kernel_anova_modular
def kernel_anova_modular (fm_train_real=traindat,fm_test_real=testdat,cardinality=2, size_cache=10):
from shogun.Kernel import ANOVAKernel
from shogun.Features import RealFeatures
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
kernel=ANOVAKernel(feats_train, feats_train, cardinality, size_cache)
for i in range(0,feats_train.get_num_vectors()):
for j in range(0,feats_train.get_num_vectors()):
k1 = kernel.compute_rec1(i,j)
k2 = kernel.compute_rec2(i,j)
#if abs(k1-k2) > 1e-10:
# print "|%s|%s|" % (k1, k2)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train, km_test, kernel
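Once traindat/testdat are defined, a quick numpy-only sanity check on the returned matrices: the ANOVA kernel evaluated on the training data against itself should give a symmetric matrix.

import numpy
km_train, km_test, kernel = kernel_anova_modular()
assert numpy.allclose(km_train, km_train.T)   # train-vs-train kernel matrix is symmetric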
Example 12: preprocessor_randomfouriergausspreproc_modular
def preprocessor_randomfouriergausspreproc_modular (fm_train_real=traindat,fm_test_real=testdat,width=1.4,size_cache=10):
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.Preprocessor import RandomFourierGaussPreproc
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
preproc=RandomFourierGaussPreproc()
preproc.init(feats_train)
feats_train.add_preprocessor(preproc)
feats_train.apply_preprocessor()
feats_test.add_preprocessor(preproc)
feats_test.apply_preprocessor()
kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
Example 13: preproc_prunevarsubmean_modular
def preproc_prunevarsubmean_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.4, size_cache=10):
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.PreProc import PruneVarSubMean
feats_train = RealFeatures(fm_train_real)
feats_test = RealFeatures(fm_test_real)
preproc = PruneVarSubMean()
preproc.init(feats_train)
feats_train.add_preproc(preproc)
feats_train.apply_preproc()
feats_test.add_preproc(preproc)
feats_test.apply_preproc()
kernel = Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train = kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test = kernel.get_kernel_matrix()
return km_train, km_test, kernel
Example 14: preprocessor_normone_modular
def preprocessor_normone_modular (fm_train_real=traindat,fm_test_real=testdat,width=1.4,size_cache=10):
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.Preprocessor import NormOne
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
preprocessor=NormOne()
preprocessor.init(feats_train)
feats_train.add_preprocessor(preprocessor)
feats_train.apply_preprocessor()
feats_test.add_preprocessor(preprocessor)
feats_test.apply_preprocessor()
kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
Example 15: preproc_logplusone_modular
def preproc_logplusone_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.4, size_cache=10):
from shogun.Kernel import Chi2Kernel
from shogun.Features import RealFeatures
from shogun.PreProc import LogPlusOne
feats_train = RealFeatures(fm_train_real)
feats_test = RealFeatures(fm_test_real)
preproc = LogPlusOne()
preproc.init(feats_train)
feats_train.add_preproc(preproc)
feats_train.apply_preproc()
feats_test.add_preproc(preproc)
feats_test.apply_preproc()
kernel = Chi2Kernel(feats_train, feats_train, width, size_cache)
km_train = kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test = kernel.get_kernel_matrix()
return km_train, km_test, kernel
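Most of the examples above default to traindat/testdat matrices that the original shogun example files load from data files. A hedged stand-in that makes this last snippet runnable on its own (random non-negative data, which suits the Chi2Kernel):

from numpy.random import rand

traindat = rand(9, 11)    # 9 dimensions, 11 training examples (made-up sizes)
testdat  = rand(9, 17)    # 9 dimensions, 17 test examples
km_train, km_test, kernel = preproc_logplusone_modular(traindat, testdat)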