This article collects typical usage examples of the svm_train function from Python's svmutil module (the LIBSVM Python interface). If you are asking yourself what svm_train does, how it is called, or what real-world uses of it look like, the curated examples below should help.
Fifteen svm_train code examples are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps recommend better Python code examples.
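Before the project-specific examples, here is a minimal sketch of the typical svm_train workflow. It is an illustration only: the data file name 'heart_scale' (the sample data set shipped with LIBSVM) and all parameter values are placeholders, not taken from any example below.

from svmutil import svm_read_problem, svm_train, svm_predict

# Load data in LIBSVM's sparse text format: "<label> <index>:<value> ...".
y, x = svm_read_problem('heart_scale')  # placeholder file name

# Train an RBF-kernel C-SVC on the first 200 rows;
# '-c' is the soft-margin cost, '-g' the kernel gamma.
model = svm_train(y[:200], x[:200], '-t 2 -c 1 -g 0.5')

# Evaluate on the remaining rows; p_acc = (accuracy %, MSE, squared correlation).
p_labels, p_acc, p_vals = svm_predict(y[200:], x[200:], model)

# Passing '-v <k>' instead makes svm_train run k-fold cross-validation
# and return the accuracy as a float rather than a model object.
cv_acc = svm_train(y, x, '-t 2 -c 1 -g 0.5 -v 5')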
Example 1: TrainSvmRbf
def TrainSvmRbf(Y, X, sweep_c=range(-5,5), sweep_g=range(-5,5)):
    num_negatives = float(Y.count(-1))
    num_positives = float(Y.count(1))
    best_c = -1
    best_g = -1
    best_acc = -1
    for c_pow in sweep_c:
        for g_pow in sweep_g:
            current_c = np.power(2.0, c_pow)
            current_g = np.power(2.0, g_pow)
            prob = svm.svm_problem(Y, X)
            param = svm.svm_parameter('-v 5 -t 2 -c %f -g %f -w-1 %f -w1 %f -q' % (current_c,
                                                                                   current_g,
                                                                                   100/num_negatives,
                                                                                   100/num_positives))
            current_acc = svm.svm_train(prob, param)
            print 'c = %f, g = %f, cv acc = %f' % (current_c, current_g, current_acc)
            if best_acc < current_acc:
                best_acc = current_acc
                best_c = current_c
                best_g = current_g
    prob = svm.svm_problem(Y, X)
    param = svm.svm_parameter('-t 2 -c %f -g %f -w-1 %f -w1 %f -q' % (best_c, best_g,
                                                                      100/num_negatives,
                                                                      100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    pdb.set_trace()
    return svm_model
Example 2: calculate_race
def calculate_race():
    correct = 0
    answers = []
    input = []
    count = 0
    for d in data:
        answers.append(question2b_race_truth.truth[count])
        input.append(d)
        if count == 49:
            break
        count += 1
    prob = svmutil.svm_problem(answers, input)
    param = svmutil.svm_parameter('-t 2 -c 4')
    param.cross_validation = 1
    param.nr_fold = 10
    cv = svmutil.svm_train(prob, param)
    param = svmutil.svm_parameter('-t 2 -c 4')
    m = svmutil.svm_train(prob, param)
    count = 0
    for d in data:
        if count < 50:
            count += 1
            continue
        else:
            x0, max_idx = gen_svm_nodearray(d)
            p = libsvm.svm_predict(m, x0)
            if p == question2b_race_truth.truth[count]:
                correct += 1
            count += 1
    return cv, correct / float(50) * 100
Example 3: TrainSvmLinear
def TrainSvmLinear(Y, X, sweep_c=range(-2,8)):
    num_positives = float(Y.count(1))
    num_negatives = float(Y.count(-1))
    best_c = -1
    best_acc = -1
    for c_pow in sweep_c:
        current_c = np.power(2.0, c_pow)
        prob = svm.svm_problem(Y, X)
        param = svm.svm_parameter('-v 5 -t 0 -c %f -w-1 %f -w1 %f -q' % (current_c,
                                                                         100/num_negatives,
                                                                         100/num_positives))
        current_acc = svm.svm_train(prob, param)
        print '%f, %f' % (current_c, current_acc)
        if best_acc < current_acc:
            best_acc = current_acc
            best_c = current_c
    # recompute accuracy
    param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (best_c,
                                                                100/num_negatives,
                                                                100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    prob = svm.svm_problem(Y, X)
    param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (best_c,
                                                                100/num_negatives,
                                                                100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    pdb.set_trace()
    return svm_model
Example 4: train
def train(self):
    for i in range(4):
        self.convert()
        # rbf
        param1 = svmutil.svm_parameter("-t 2 -b 1 -c 1 -g 0.001")
        param2 = svmutil.svm_parameter("-t 2 -b 1 -c 0.1 -g 0.001")
        self.mr.append(svmutil.svm_train(self.problem[0], param1))  # hist
        self.mr.append(svmutil.svm_train(self.problem[1], param2))  # vector
        # linear
        param3 = svmutil.svm_parameter("-t 0 -b 1 -c 0.1")
        param4 = svmutil.svm_parameter("-t 0 -b 1 -c 0.01")
        self.ml.append(svmutil.svm_train(self.problem[0], param3))  # hist
        self.ml.append(svmutil.svm_train(self.problem[1], param4))  # vector
        self.images = self.images[1:] + self.images[:1]
Example 5: prob20
def prob20():
    import random
    gamma = [1, 10, 100, 1000, 10000]
    chosen = {1:0, 10:0, 100:0, 1000:0, 10000:0}
    for _ in range(100):
        Eout = []
        for g in gamma:
            trainX, testX, trainy, testy = readdat()
            mul_label_2_bin(trainy, testy, 0)
            trainX = zip(trainX, trainy)
            random.shuffle(trainX)
            trainX, trainy = zip(*trainX)
            valX = trainX[:1000]
            valy = trainy[:1000]
            trainX = trainX[1000:]
            trainy = trainy[1000:]
            m = svmutil.svm_train(trainy, trainX, '-s 0 -t 2 -c 0.1 -g %f -h 0' % (g))
            p_label, p_acc, p_val = svmutil.svm_predict(valy, valX, m)
            Eout.append(100.0 - p_acc[0])
        chosen[gamma[Eout.index(min(Eout))]] += 1
    print "prob20: ",
    for k in chosen.keys():
        print "gamma=%d:%d, " % (k, chosen[k]),
    print ""
Example 6: CrossValidate
def CrossValidate(Y, X, param, k_folds=5):
    rand_idx = range(len(Y))
    random.shuffle(rand_idx)
    idx_groups = SplitIntoK(k_folds, rand_idx)
    pos_acc = 0
    neg_acc = 0
    for i in range(k_folds):
        test_idx = idx_groups[i]
        exclude_test = [idx_groups[j] for j in range(len(idx_groups)) if i != j]
        train_idx = list(chain(*exclude_test))
        Y_test = [Y[test_i] for test_i in test_idx]
        X_test = [X[test_i] for test_i in test_idx]
        Y_train = [Y[train_i] for train_i in train_idx]
        X_train = [X[train_i] for train_i in train_idx]
        # recompute accuracy
        prob = svm.svm_problem(Y_train, X_train)
        svm_model = svm.svm_train(prob, param)
        p_labs, p_acc, p_vals = svm.svm_predict(Y_test, X_test, svm_model, '-q')
        tps = sum([1 for j in range(len(p_labs)) if (p_labs[j] == 1 and Y_test[j] == 1)])
        fns = sum([1 for j in range(len(p_labs)) if (p_labs[j] == -1 and Y_test[j] == 1)])
        tns = sum([1 for j in range(len(p_labs)) if (p_labs[j] == -1 and Y_test[j] == -1)])
        fps = sum([1 for j in range(len(p_labs)) if (p_labs[j] == 1 and Y_test[j] == -1)])
        pos_acc += tps / float(tps + fns)
        neg_acc += tns / float(tns + fps)
    pos_acc = pos_acc / k_folds
    neg_acc = neg_acc / k_folds
    return (pos_acc, neg_acc)
Example 7: train
def train(cls, featuresets, params="-t 0 -q"):
    """Train a classifier using the given featuresets.

    Args:
        featuresets: List of featuresets.
        params: Parameter string to pass to svmutil.svm_parameter.

    Returns:
        SvmClassifier object.
    """
    all_features = set()
    all_labels = set()
    for featuredict, label in featuresets:
        all_features.update(set(featuredict.keys()))
        all_labels.add(label)
    all_labels = sorted(all_labels)
    all_features = sorted(all_features)
    featureindex = dict(zip(all_features, range(1, len(all_features) + 1)))
    labelindex = dict(zip(all_labels, range(1, len(all_labels) + 1)))
    vectors, labels = cls.featuresets_to_svm(featureindex, labelindex,
                                             featuresets)
    prob = svmutil.svm_problem(labels, vectors)
    param = svmutil.svm_parameter(params)
    model = svmutil.svm_train(prob, param)
    return cls(featureindex, labelindex, model)
Example 8: main
def main(path, k):
    prabs = []
    lns = []
    for kk in range(0, k-1):
        testLabel = []
        trainPoint = []
        trainLabel = []
        testPoint = []
        wcCount = 0
        for u in os.listdir(path):
            if u[-2:] == 'WC':
                wcCount += 1
                filePath = path + u
                WC = pickle.load(open(filePath, 'rb'))
                if wcCount % k == 0 + kk:
                    testLabel.append(int(u[1]))
                    testPoint.append(WC)
                else:
                    trainLabel.append(int(u[1]))
                    trainPoint.append(WC)
        lns.append(len(testLabel))
        prob = svmutil.svm_problem(trainLabel, trainPoint)
        param = svmutil.svm_parameter('-t 0 -c 4 -b 1 -q')
        m = svmutil.svm_train(prob, param)
        svmutil.svm_save_model('n.model', m)
        p_label, p_acc, p_val = svmutil.svm_predict(testLabel, testPoint, m, '-b 1')
        prabs.append(p_acc[0])
Example 9: train_test
def train_test():
    train_subdir = "data/train/"
    test_subdir = "data/test/"
    img_kinds = ["happy", "anger", "neutral", "surprise"]
    models = {}
    params = "-t 0 -c 3"
    svm_params = {"happy": params,
                  "anger": params,
                  "neutral": params,
                  "surprise": params}
    # train the models
    print 'BUILDING TRAIN MODELS'
    for img_kind in img_kinds:
        print "\t" + img_kind
        problem = build_problem(img_kind, train_subdir)
        param = svm.svm_parameter(svm_params[img_kind])
        models[img_kind] = svmutil.svm_train(problem, param)
    print '================================'

    # for each image in the test set, let's see what the answer is
    total_count = 0
    correct_count = 0
    wrong_count = 0
    print 'TESTING MODELS'
    for img_kind in img_kinds:
        images = glob.glob(test_subdir + "f_" + img_kind + "*.jpg")
        for image in images:
            print "\t" + image
            image_data = cv.LoadImage(image)
            # Let's see what the results from the models are
            results = {}
            for kind in img_kinds:
                test_data = get_image_features(image_data, True, kind)
                predict_input_data = []
                predict_input_data.append(test_data)
                # do svm query
                (val, val_2, label) = svmutil.svm_predict([1], predict_input_data, models[kind])
                results[kind] = label[0][0]
            sorted_results = sorted(results.iteritems(), key=operator.itemgetter(1))
            result = sorted_results[len(sorted_results)-1][0]
            total_count += 1
            if result == img_kind:
                print 'YES :' + result
                correct_count += 1
            else:
                print 'NO :' + result
                print sorted_results
                wrong_count += 1
            print '-----------------------'
    print '================================'
    print "Total Pictures: " + str(total_count)
    print "Correct: " + str(correct_count)
    print "Wrong: " + str(wrong_count)
    print "Accuracy: " + str(correct_count/float(total_count) * 100)
Example 10: train
def train(self, x, y):
    """
    training using y=list, x=dict
    parameter = string of parameters
    """
    prob = su.svm_problem(y, x)
    para = ""
    para += "-s %d -t %d -d %d -g %f -r %f -c %f -n %f -p %f -e %f -b %d" % \
        (
            self.type,
            self.kernel,
            self.degree,
            self.gamma,
            self.coef0,
            self.c,
            self.nu,
            self.p,
            self.eps,
            self.prob
        )
    if self.v != 0:
        para += " -v %d" % self.v
    if self.q != 0:
        para += " -q"
    print para
    para1 = su.svm_parameter(para)
    self.model = su.svm_train(prob, para1)
    return True
Example 11: __init__
def __init__(self, train_feature_file=TRAIN_FEATURE_FILE):
    if os.path.exists(SAVED_MODEL):
        self.model = svmutil.svm_load_model(SAVED_MODEL)
    else:
        y, x = svmutil.svm_read_problem(train_feature_file)
        self.model = svmutil.svm_train(y, x, '-c 4')
        svmutil.svm_save_model(SAVED_MODEL, self.model)
Example 12: iqr_model_train
def iqr_model_train(matrix_kernel_train, labels_train, idx2clipid,
                    svm_para='-w1 50 -t 4 -b 1 -c 1'):
    """
    Light-weighted SVM learning module for online IQR

    @param matrix_kernel_train: n-by-n square numpy array with kernel values
        between training data
    @param labels_train: row-wise labels of training data (1 or True indicates
        positive, 0 or False otherwise)
    @param idx2clipid: idx2clipid(row_idx) returns the clipid for the 0-base row
        in matrix
    @param svm_para: (optional) SVM learning parameter
    @rtype: dictionary with 'clipids_SVs': list of clipids for support vectors
    @return: output as a dictionary with 'clipids_SVs'
    """
    log = logging.getLogger('iqr_model_train')

    # set training inputs
    matrix_kernel_train = np.vstack((np.arange(1, len(matrix_kernel_train)+1),
                                     matrix_kernel_train)).T
    log.debug("Done matrix_kernel_train")

    problem = svm.svm_problem(labels_train.tolist(), matrix_kernel_train.tolist(), isKernel=True)
    log.debug("Done problem")
    svm_param = svm.svm_parameter(svm_para)
    log.debug("Done svm_param")

    # train model
    model = svmutil.svm_train(problem, svm_param)
    log.debug("Done train model")

    # release memory
    del problem
    del svm_param
    log.debug("Done release memory")

    # check learning failure
    if model.l == 0:
        raise Exception('svm model learning failure')
    log.debug("Done checking learning failure (no failure)")

    n_SVs = model.l
    clipids_SVs = []
    idxs_train_SVs = svmtools.get_SV_idxs_nonlinear_svm(model)
    for i in range(n_SVs):
        _idx_1base = idxs_train_SVs[i]
        _idx_0base = _idx_1base - 1
        clipids_SVs.append(idx2clipid[_idx_0base])
        model.SV[i][0].value = i+1  # within SVM model, index needs to be 1-base
    log.debug("Done collecting support vector IDs")

    #svmutil.svm_save_model(filepath_model, model)

    output = dict()
    output['model'] = model
    output['clipids_SVs'] = clipids_SVs
    return output
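Example 12 relies on LIBSVM's precomputed-kernel mode ('-t 4'), in which every row handed to svm_problem must begin with its 1-based serial number, followed by kernel values against all training points. The following is a rough, self-contained sketch of that convention only; the 3x3 kernel matrix and labels are made up for illustration, and the isKernel argument assumes a LIBSVM version that supports it (as the example above does).

import numpy as np
from svm import svm_problem, svm_parameter
from svmutil import svm_train, svm_predict

# Made-up 3x3 kernel matrix K[i][j] = K(x_i, x_j) and labels.
K = np.array([[4.0, 1.0, 0.5],
              [1.0, 3.0, 0.2],
              [0.5, 0.2, 2.0]])
labels = [1, -1, 1]

# For '-t 4', column 0 of each row must hold the row's 1-based index.
K_train = np.hstack((np.arange(1, len(K) + 1).reshape(-1, 1), K))

prob = svm_problem(labels, K_train.tolist(), isKernel=True)
model = svm_train(prob, svm_parameter('-t 4 -c 1 -q'))

# Prediction uses the same layout: an index column plus kernel values
# between each query point and every training point.
p_labs, p_acc, p_vals = svm_predict(labels, K_train.tolist(), model)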
Example 13: train_test_model
def train_test_model(train_datafile, test_datafile):
    from svmutil import svm_read_problem, svm_train, svm_predict
    y, x = svm_read_problem(train_datafile)
    m = svm_train(y, x, '-t 0 -e .01 -m 1000 -h 0')
    y_test, x_test = svm_read_problem(test_datafile)
    p_labs, p_acc, p_vals = svm_predict(y_test, x_test, m)
    return p_labs, p_acc, p_vals
Example 14: valid
def valid(self, datasets, opt, opp, method=fold, part_ids=None, seed=None, test_data=None):
    if seed is None:
        # If seed is not set, UNIX time is used as the seed.
        seed = time.time()
    saving_seed = "%s/log/%s.log.seed" % (self._dir, self._name)
    with open(saving_seed, "w") as fp:
        # Save the seed value that was used.
        fp.write("seed:%f\n" % seed)
    if part_ids is None:
        part_ids = datasets.pids
    groups = [(test, train) for test, train in method(part_ids, seed=seed)]
    for cnt, pdtsts in enumerate(groups):
        # cnt is the number of the cluster.
        if test_data is None:
            test = False
            ltest, dtest, itest = test2svm_prob(datasets.mkTest(pdtsts[0]))
        else:
            test = True
            ltest, dtest, itest = test2svm_prob(test_data.mkTest(test_data.pids))
        print "start %s validation" % (cnt)
        ptrn, itrain = train2svm_prob(datasets.mkTrain(pdtsts[1]))
        #opt = svm.svm_parameter(opt)
        model = svmutil.svm_train(ptrn, opt)
        plbl, pacc, pval = svmutil.svm_predict(ltest, dtest, model, opp)
        # create saving directory
        #self._mkdir(cnt)
        # create log files
        self._save_log(itest, plbl, pval, cnt, test)
        model_name = "%s/model/%s.model.%s" % (self._dir, self._name, cnt)
Example 15: run_cross_validation
def run_cross_validation(dTrain, dTest):
    'Work with the polynomial kernel and cross validation'
    print '--run_cross_validation--'
    print '-- 1 versus 5 with Q = 2 and Cross Validation--'
    Cs = [0.0001, 0.001, 0.01, 0.1, 1]
    Ecvs = [[], [], [], [], []]
    print '-- Train and Test --'
    dTrain_shuffle = dTrain
    # Try 100 runs with different partitions
    for j in range(100):
        # roll those dice
        shuffle(dTrain_shuffle)
        # Get data and formatted vectors
        dTrain_1vs5 = getDataOneVsOne(dTrain_shuffle, 1, 5)
        X_train_1vs5, Y_train_1vs5 = get_svm_vector_format(dTrain_1vs5)
        # Try all Cs with cross validation
        for i in range(len(Cs)):
            # Kernel type = polynomial, degree = 2, gamma = 1,
            # coef0 = 1, C = Cs[i], 10-fold cross validation, quiet mode.
            options = '-t 1 -d 2 -g 1 -r 1 -c ' + str(Cs[i]) + ' -v 10 -q'
            m = svm_train(Y_train_1vs5, X_train_1vs5, options)
            Ecvs[i].append(100 - m)
    # display
    print
    for i in range(len(Ecvs)):
        print 'Ecv = %s \tfor C = %s' % (sum(Ecvs[i])/100., Cs[i])
    print