本文整理汇总了C++中SVM类的典型用法代码示例。如果您正苦于以下问题:C++ SVM类的具体用法?C++ SVM怎么用?C++ SVM使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了SVM类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: trainSVM
// Trains the given OpenCV SVM on single-channel floating point data.
// When C (or, for an RBF kernel, gamma) carries the sentinel value -1,
// the parameters are tuned automatically via 5-fold cross validation;
// otherwise the caller-supplied C/gamma are used as-is.
static void trainSVM(SVM &svm, Mat data, Mat lab, int kernel, int type, float C, float gamma)
{
    if (data.type() != CV_32FC1)
        qFatal("Expected single channel floating point training data.");

    CvSVMParams params;
    params.kernel_type = kernel;
    params.svm_type = type;
    params.p = 0.1;
    params.nu = 0.5;

    const bool needsAutoTuning = (C == -1) || ((gamma == -1) && (kernel == CvSVM::RBF));
    if (!needsAutoTuning) {
        // Caller pinned the hyper-parameters; train directly.
        params.C = C;
        params.gamma = gamma;
        svm.train(data, lab, Mat(), Mat(), params);
    } else {
        try {
            svm.train_auto(data, lab, Mat(), Mat(), params, 5);
        } catch (...) {
            // train_auto can fail when a class has too few samples;
            // fall back to a plain train with default parameters.
            qWarning("Some classes do not contain sufficient examples or are not discriminative enough for accurate SVM classification.");
            svm.train(data, lab);
        }
    }

    const CvSVMParams trained = svm.get_params();
    qDebug("SVM C = %f Gamma = %f Support Vectors = %d", trained.C, trained.gamma, svm.get_support_vector_count());
}
示例2: TEST
// Saving a model that has never been trained must raise cv::Exception
// rather than silently writing an invalid model file.
TEST(ML_SVM, throw_exception_when_save_untrained_model)
{
SVM svm;
string filename = tempfile("svm.xml");
ASSERT_THROW(svm.save(filename.c_str()), Exception);
remove(filename.c_str()); // clean up the temp file even though save failed
}
示例3: deepCopyFrom
bool SVM::deepCopyFrom(const Classifier *classifier){
if( classifier == NULL ) return false;
if( this->getClassifierType() == classifier->getClassifierType() ){
SVM *ptr = (SVM*)classifier;
this->clear();
//SVM variables
this->problemSet = false;
this->model = ptr->deepCopyModel();
this->deepCopyParam( ptr->param, this->param );
this->numInputDimensions = ptr->numInputDimensions;
this->kFoldValue = ptr->kFoldValue;
this->classificationThreshold = ptr->classificationThreshold;
this->crossValidationResult = ptr->crossValidationResult;
this->useAutoGamma = ptr->useAutoGamma;
this->useCrossValidation = ptr->useCrossValidation;
//Classifier variables
return copyBaseVariables( classifier );
}
return false;
}
示例4: TEST
// Tests the default constructor
// A freshly constructed SVM must report its own type id and be untrained.
TEST(SVM, Constructor) {
SVM svm;
//Check the type matches
EXPECT_TRUE( svm.getClassifierType() == SVM::getId() );
//Check the module is not trained
EXPECT_TRUE( !svm.getTrained() );
}
示例5: columbiaTest
// Evaluates the trained SVM on the samples of one held-out subject:
// loads the face detector, landmark shape predictor, PCA basis and SVM,
// projects each sample's HOG features through the PCA, appends the
// landmark features, predicts, and writes the accuracy to stdout and
// to data/testId<N>.txt.
// NOTE(review): assumes collectData(testId, ...) gathers exactly the
// held-out subject's samples — confirm against collectData.
void columbiaTest(int testId = 0)
{
// Face detector and facial-landmark shape predictor.
CascadeClassifier classifier;
classifier.load("haarcascades/haarcascade_frontalface_alt_tree.xml");
ShapePredictor predictor;
predictor.load("model/helen.txt");
// Rebuild the PCA basis saved by the training stage.
PCA pca;
FileStorage fs("model/pca.xml", FileStorage::READ);
fs["mean"] >> pca.mean;
fs["eigenvals"] >> pca.eigenvalues;
fs["eigenvecs"] >> pca.eigenvectors;
fs.release();
/*LDA lda;
lda.load("model/lda.xml");*/
SVM svm;
svm.load("model/svm.xml");
cout << "\nmodel loaded" << endl;
// test prediction
cout << "\nbegin test" << endl;
int corr = 0, total = 0;
Mat_<float> labels, multihog, ldmks;
collectData(testId, classifier, predictor,
labels, multihog, ldmks);
// For each sample: PCA-project the HOG row, append the landmark
// features, classify, and count correct predictions.
for (int i = 0; i < multihog.rows; i++) {
Mat_<float> pcaVec = pca.project(multihog.row(i));
Mat_<float> datVec(1, pcaVec.cols + ldmks.cols);
for (int j = 0; j < pcaVec.cols; j++)
datVec(0, j) = pcaVec(0, j);
for (int j = 0; j < ldmks.cols; j++)
datVec(0, j + pcaVec.cols) = ldmks(i, j);
//Mat_<float> ldaVec = lda.project(datVec);
float pred = svm.predict(datVec);
if ((int)pred == (int)labels(i, 0))
corr++;
total++;
}
// Report accuracy both to stdout and to a per-subject results file.
cout << "testId = " << testId << endl;
cout << "corr = " << corr << " , total = " << total << endl;
cout << "percentage: " << (double)corr / total << endl;
ofstream fout("data/testId" + to_string(testId) + ".txt");
fout << "corr = " << corr << " , total = " << total << endl;
fout << "percentage: " << (double)corr / total << endl;
fout.close();
}
示例6: main
int main(int argn, char *argv[])
{
//SVM_verbose = true;
seed_random();
SVM * i = new SVM(argv[1]);
double result = i->run();
if isnan(result)
cout << "WARNING: RESULT IS NAN\n";
cout << result << endl;
delete i;
}
示例7: columbiaTrain
// Trains the SVM on all Columbia subjects except `testId`
// (leave-one-subject-out): collects HOG + landmark features, reduces
// the HOG block with PCA (saved to model/pca.xml) and trains an
// auto-tuned RBF C-SVC (saved to model/svm.xml).
void columbiaTrain(int testId = 0)
{
// Face detector and facial-landmark shape predictor.
CascadeClassifier classifier;
classifier.load("haarcascades/haarcascade_frontalface_alt_tree.xml");
ShapePredictor predictor;
predictor.load("model/helen.txt");
cout << "face & shape detector loaded\n" << endl;
FileStorage fs;
Mat_<float> labels, multihog, ldmks;
// Gather training data from every subject except the held-out one.
// NOTE(review): subject ids appear to run 1..56 — confirm against dataset.
for (int subjId = 1; subjId <= 56; subjId++) {
if (subjId == testId) continue;
collectData(subjId, classifier, predictor,
labels, multihog, ldmks);
}
cout << "multihog.rows = " << multihog.rows << endl;
cout << "multihog.cols = " << multihog.cols << endl;
// PCA
cout << "\nbegin PCA" << endl;
int pcaComp = 400; // number of principal components kept
PCA pca(multihog, Mat(), CV_PCA_DATA_AS_ROW, pcaComp);
fs.open("model/pca.xml", FileStorage::WRITE);
fs << "mean" << pca.mean;
fs << "eigenvals" << pca.eigenvalues;
fs << "eigenvecs" << pca.eigenvectors;
fs.release();
cout << "PCA complete" << endl;
Mat_<float> pcaMat = pca.project(multihog);
cout << "pcaMat.rows = " << pcaMat.rows << endl;
cout << "pcaMat.cols = " << pcaMat.cols << endl;
// Concatenate PCA-reduced HOG features with the landmark features.
Mat_<float> dataMat(multihog.rows, pcaMat.cols + ldmks.cols);
for (int i = 0; i < multihog.rows; i++) {
for (int j = 0; j < pcaMat.cols; j++)
dataMat(i, j) = pcaMat(i, j);
for (int j = 0; j < ldmks.cols; j++)
dataMat(i, j + pcaMat.cols) = ldmks(i, j);
}
// SVM
cout << "\ntrain SVM" << endl;
SVMParams params;
params.svm_type = SVM::C_SVC;
params.kernel_type = SVM::RBF;
SVM svm;
// train_auto cross-validates C/gamma internally.
svm.train_auto(dataMat, labels, Mat(), Mat(), params);
svm.save("model/svm.xml");
cout << "SVM saved!\n" << endl;
}
示例8: runSVM
double runSVM(char const *file)
{
char *oldLocale = setlocale(LC_ALL, NULL);
SVM_verbose = false;
setlocale(LC_ALL, "C");
seed_random();
SVM * i = new SVM(file);
double result = i->run();
delete i;
setlocale(LC_ALL, oldLocale);
if isnan(result)
cout << "WARNING: RESULT IS NAN\n";
return result;
}
示例9: ex_model_norm
// Trains an SSI SVM with *external* z-score normalization attached to
// the trainer, saves it as "svm+norm", then reloads it to evaluate on a
// development set and to cluster an unlabeled test set. Returns true.
bool ex_model_norm(void *arg) {

    Trainer::SetLogLevel(SSI_LOG_LEVEL_DEBUG);

    // Synthetic 2D sample sets: 4 classes for train/devel, plus one
    // unlabeled set spread over the feature space for clustering.
    ssi_size_t n_classes = 4;
    ssi_size_t n_samples = 50;
    ssi_size_t n_streams = 1;
    ssi_real_t train_distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f };
    ssi_real_t test_distr[][3] = { 0.5f, 0.5f, 0.5f };
    SampleList strain;
    SampleList sdevel;
    SampleList stest;
    ModelTools::CreateTestSamples(strain, n_classes, n_samples, n_streams, train_distr, "user");
    ModelTools::CreateTestSamples(sdevel, n_classes, n_samples, n_streams, train_distr, "user");
    ModelTools::CreateTestSamples(stest, 1, n_samples * n_classes, n_streams, test_distr, "user");

    // The unlabeled test list still needs the class names for plotting.
    ssi_char_t string[SSI_MAX_CHAR];
    for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
        ssi_sprint(string, "class%02d", n_class);
        stest.addClassName(string);
    }

    // train svm
    {
        SVM *model = ssi_create(SVM, 0, true);
        model->getOptions()->seed = 1234; // deterministic training
        Trainer trainer(model);
        ISNorm::Params params;
        ISNorm::ZeroParams(params, ISNorm::METHOD::ZSCORE);
        // BUG FIX: source had the HTML-mangled token "¶ms"
        // ("&para" + "ms"); the intended argument is &params.
        trainer.setNormalization(&params);
        trainer.train(strain);
        trainer.save("svm+norm");
    }

    // evaluation
    {
        Trainer trainer;
        Trainer::Load(trainer, "svm+norm");
        Evaluation eval;
        eval.eval(&trainer, sdevel);
        eval.print();
        trainer.cluster(stest);
        ModelTools::PlotSamples(stest, "svm (external normalization)", ssi_rect(650,0,400,400));
    }

    return true;
}
示例10: loadSVM
// Deserializes an SVM model from a QDataStream. The stream carries the
// raw bytes of a model file; since SVM::load() only reads from disk,
// the bytes are spooled through a temporary file first.
static void loadSVM(SVM &svm, QDataStream &stream)
{
    // Pull the serialized model bytes out of the stream.
    QByteArray modelBytes;
    stream >> modelBytes;

    // Spool them to a temporary file on disk.
    QTemporaryFile tempFile(QDir::tempPath()+"/SVM");
    tempFile.open();
    tempFile.write(modelBytes);
    tempFile.close();

    // Let the SVM read its model back from that file.
    svm.load(qPrintable(tempFile.fileName()));
}
示例11: main
int main(int argc, char** argv)
{
/*
Equation eq;
eq.theta0 = -1;
eq.theta[0] = 2;
Equation eq1;
eq.roundoff(eq1);
std::cout << eq << std::endl;
std::cout << eq1 << std::endl;
return 0;
*/
if (argc < 1) {
std::cout << "Arguments less than 2.\n";
exit(-1);
}
if (argc >= 3) {
minv = atoi(argv[1]);
maxv = atoi(argv[2]);
}
Solution inputs;
init_gsets();
srand(time(NULL)); // initialize seed for rand() function
int rnd;
bool b_similar_last_time = false;
bool b_converged = false;
bool b_svm_i = false;
Equation* p = NULL;
int pre_positive_size = 0, pre_negative_size = 0; // , pre_question_size = 0;
//int cur_positive_size = 0, cur_negative_size = 0; // , cur_question_size = 0;
//Start SVM training
SVM* svm = new SVM(print_null);
//svm->problem.x = (svm_node**)(training_set);
//svm->problem.y = training_label;
for (rnd = 1; rnd <= max_iter; rnd++) {
svm->main_equation = NULL;
init_svm:
std::cout << "[" << rnd << "]SVM-----------------------------------------------" << "-------------------------------------------------------------" << std::endl;
if (rnd == 1) {
/*
* The first round is very special, so we put this round apart with its following rounds.
* 1> We used random values as inputs for program executions in the first round.
* 2> We need to make sure there are at last two classes of generated traces. "positive" and "negative"
*/
std::cout << "\t(1) execute programs... [" << init_exes + random_exes << "] {";
for (int i = 0; i < init_exes + random_exes; i++) {
Equation::linearSolver(NULL, inputs);
std::cout << inputs;
if (i < init_exes + random_exes - 1) std::cout << "|";
run_target(inputs);
}
std::cout << "}" << std::endl;
if (gsets[POSITIVE].traces_num() == 0 || gsets[NEGATIVE].traces_num() == 0) {
if (gsets[POSITIVE].traces_num() == 0) std::cout << "[0] Positive trace, execute program again." << std::endl;
if (gsets[NEGATIVE].traces_num() == 0) std::cout << "[0] Negative trace, execute program again." << std::endl;
goto init_svm;
}
}
else {
std::cout << "\t(1) execute programs...[" << after_exes + random_exes << "] {";
for (int i = 0; i < random_exes; i++) {
Equation::linearSolver(NULL, inputs);
std::cout << inputs;
std::cout << " | ";
run_target(inputs);
}
for (int i = 0; i < after_exes; i++) {
Equation::linearSolver(p, inputs);
std::cout << " | " << inputs;
run_target(inputs);
}
std::cout << "}" << std::endl;
}
std::cout << "\t(2) prepare training data... ";
svm->prepare_training_data(gsets, pre_positive_size, pre_negative_size);
std::cout << std::endl;
std::cout << "\t(3) start training... ";
svm->train();
std::cout << "|-->> ";
set_console_color(std::cout);
std::cout << *svm << std::endl;
unset_console_color(std::cout);
/*
* check on its own training data.
//.........这里部分代码省略.........
示例12: test
void test(set<int> &testSet, int code)
{
CascadeClassifier classifier;
classifier.load("haarcascades/haarcascade_frontalface_alt_tree.xml");
ShapePredictor predictor;
predictor.load("model/helen.txt");
PCA pca;
FileStorage fs("model/girl_pca.xml", FileStorage::READ);
fs["mean"] >> pca.mean;
fs["eigenvals"] >> pca.eigenvalues;
fs["eigenvecs"] >> pca.eigenvectors;
SVM svm;
svm.load("model/girl_svm.xml");
cout << "\nmodel loaded" << endl;
ifstream fin("img/labels.txt");
ofstream fout("data/out_" +
to_string(code) + ".txt");
VideoWriter writer("data/out.avi", 0, 10, Size(1920, 1080), true);
string line;
int corr = 0, total = 0;
while (getline(fin, line)) {
stringstream ss(line);
int frame, label;
ss >> frame >> label;
label -= 49;
if (testSet.find(frame) == testSet.end())
continue;
Mat vis = imread("img/" + to_string(frame) + ".jpg",
CV_LOAD_IMAGE_UNCHANGED);
Mat_<uchar> img;
cvtColor(vis, img, COLOR_BGR2GRAY);
BBox bbox = getTestBBox(img, classifier);
if (EmptyBox(bbox)) continue;
Mat_<double> shape = predictor(img, bbox);
Geom G; initGeom(shape, G);
Pose P; calcPose(G, P);
Mat_<uchar> lEye, rEye;
regularize(img, bbox, P, shape, lEye, rEye);
vector<float> lRlt;
vector<float> rRlt;
calcMultiHog(lEye, lRlt);
calcMultiHog(rEye, rRlt);
Mat_<float> pcaVec, ldmks;
vector<float> _hog2nd_vec;
for (int k = 0; k < lRlt.size(); k++)
_hog2nd_vec.push_back(lRlt[k]);
for (int k = 0; k < rRlt.size(); k++)
_hog2nd_vec.push_back(rRlt[k]);
Mat_<float> multihog = Mat_<float>(_hog2nd_vec).reshape(1, 1);
pcaVec = pca.project(multihog);
vector<float> _ldmks;
for (int i = 28; i < 48; i++) {
_ldmks.push_back((shape(i, 0) - bbox.cx) / bbox.w);
_ldmks.push_back((shape(i, 1) - bbox.cy) / bbox.h);
}
float mouthx = (shape(51, 0) + shape(62, 0) + shape(66, 0) + shape(57, 0)) / 4;
float mouthy = (shape(51, 1) + shape(62, 1) + shape(66, 1) + shape(57, 1)) / 4;
_ldmks.push_back((mouthx - bbox.cx) / bbox.w);
_ldmks.push_back((mouthy - bbox.cy) / bbox.h);
float maxVal = *std::max_element(_ldmks.begin(), _ldmks.end());
for (int i = 0; i < _ldmks.size(); i++) _ldmks[i] *= 1.0 / maxVal; // scale to [-1, 1]
ldmks = Mat_<float>(_ldmks).reshape(1, 1);
Mat_<float> sample(1, pcaVec.cols + ldmks.cols);
for (int j = 0; j < pcaVec.cols; j++)
sample(0, j) = pcaVec(0, j);
for (int j = 0; j < ldmks.cols; j++)
sample(0, j + pcaVec.cols) = ldmks(0, j);
int pred = svm.predict(sample);
if (pred == label) corr++;
total++;
fout << frame << ' ' << label << ' ' << pred << endl;
string s1, s2;
switch (label) {
case 0: s1 = "annotation: Eye"; break;
case 1: s1 = "annotation: Face"; break;
case 2: s1 = "annotation: NOF"; break;
}
switch (pred) {
case 0: s2 = "prediction: Eye"; break;
case 1: s2 = "prediction: Face"; break;
case 2: s2 = "prediction: NOF"; break;
}
//.........这里部分代码省略.........
示例13: train
// Trains the girl-dataset SVM on all labeled frames except those in
// `testSet`: extracts multi-scale HOG features from both eye patches
// plus bbox-normalized facial-landmark coordinates, reduces the HOG
// block with PCA (saved to model/girl_pca.xml) and trains an auto-tuned
// RBF C-SVC (saved to model/girl_svm.xml).
void train(set<int> &testSet)
{
// Face detector and facial-landmark shape predictor.
CascadeClassifier classifier;
classifier.load("haarcascades/haarcascade_frontalface_alt_tree.xml");
ShapePredictor predictor;
predictor.load("model/helen.txt");
cout << "face & shape detector loaded\n" << endl;
// Each line of labels.txt holds a frame number and a label.
ifstream fin("img/labels.txt");
string line;
Mat_<float> labels, multihog, landmarks;
while (getline(fin, line)) {
stringstream ss(line);
int frame, label;
ss >> frame >> label;
// NOTE(review): labels appear to be ASCII digit codes ('1' == 49),
// so this maps them to 0-based class ids — confirm against labels.txt.
label -= 49;
if (testSet.find(frame) != testSet.end())
continue;
Mat_<uchar> img = imread("img/" + to_string(frame) + ".jpg", 0);
BBox bbox = getTestBBox(img, classifier);
// Skip frames where no face was detected.
if (EmptyBox(bbox)) continue;
Mat_<float> lbl(1, 1);
lbl(0, 0) = label;
labels.push_back(lbl);
// Landmark shape, head geometry and pose for this face.
Mat_<double> shape = predictor(img, bbox);
Geom G; initGeom(shape, G);
Pose P; calcPose(G, P);
// Rectified left/right eye patches.
Mat_<uchar> lEye, rEye;
regularize(img, bbox, P, shape, lEye, rEye);
vector<float> lRlt;
vector<float> rRlt;
calcMultiHog(lEye, lRlt);
calcMultiHog(rEye, rRlt);
// Concatenate both eyes' HOG descriptors into one feature row.
vector<float> _hog2nd_vec;
for (int k = 0; k < lRlt.size(); k++)
_hog2nd_vec.push_back(lRlt[k]);
for (int k = 0; k < rRlt.size(); k++)
_hog2nd_vec.push_back(rRlt[k]);
Mat_<float> mhog = Mat_<float>(_hog2nd_vec).reshape(1, 1);
multihog.push_back(mhog);
// Landmarks 28..47 normalized to the face bounding box.
vector<float> _ldmks;
for (int i = 28; i < 48; i++) {
_ldmks.push_back((shape(i, 0) - bbox.cx) / bbox.w);
_ldmks.push_back((shape(i, 1) - bbox.cy) / bbox.h);
}
// Mouth center averaged from four mouth landmarks.
float mouthx = (shape(51, 0) + shape(62, 0) + shape(66, 0) + shape(57, 0)) / 4;
float mouthy = (shape(51, 1) + shape(62, 1) + shape(66, 1) + shape(57, 1)) / 4;
_ldmks.push_back((mouthx - bbox.cx) / bbox.w);
_ldmks.push_back((mouthy - bbox.cy) / bbox.h);
float maxVal = *std::max_element(_ldmks.begin(), _ldmks.end());
for (int i = 0; i < _ldmks.size(); i++) _ldmks[i] *= 1.0 / maxVal; // scale to [-1, 1]
Mat_<float> ldmks = Mat_<float>(_ldmks).reshape(1, 1);
landmarks.push_back(ldmks);
}
// PCA
cout << "\nbegin PCA" << endl;
int pcaComp = 400; // number of principal components kept
PCA pca(multihog, Mat(), CV_PCA_DATA_AS_ROW, pcaComp);
FileStorage fs("model/girl_pca.xml", FileStorage::WRITE);
fs << "mean" << pca.mean;
fs << "eigenvals" << pca.eigenvalues;
fs << "eigenvecs" << pca.eigenvectors;
fs.release();
cout << "PCA complete" << endl;
Mat_<float> pcaMat = pca.project(multihog);
cout << "pcaMat.rows = " << pcaMat.rows << endl;
cout << "pcaMat.cols = " << pcaMat.cols << endl;
// Concatenate PCA-reduced HOG features with the landmark features.
Mat_<float> dataMat(multihog.rows, pcaMat.cols + landmarks.cols);
for (int i = 0; i < multihog.rows; i++) {
for (int j = 0; j < pcaMat.cols; j++)
dataMat(i, j) = pcaMat(i, j);
for (int j = 0; j < landmarks.cols; j++)
dataMat(i, j + pcaMat.cols) = landmarks(i, j);
}
// SVM
cout << "\ntrain SVM" << endl;
SVMParams params;
params.svm_type = SVM::C_SVC;
params.kernel_type = SVM::RBF;
SVM svm;
// train_auto cross-validates C/gamma internally.
svm.train_auto(dataMat, labels, Mat(), Mat(), params);
svm.save("model/girl_svm.xml");
cout << "SVM saved!\n" << endl;
}
示例14: ex_model
bool ex_model(void *arg) {
Trainer::SetLogLevel (SSI_LOG_LEVEL_DEBUG);
ssi_size_t n_classes = 4;
ssi_size_t n_samples = 50;
ssi_size_t n_streams = 1;
ssi_real_t train_distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f };
ssi_real_t test_distr[][3] = { 0.5f, 0.5f, 0.5f };
SampleList strain;
SampleList sdevel;
SampleList stest;
ModelTools::CreateTestSamples (strain, n_classes, n_samples, n_streams, train_distr, "user");
ModelTools::CreateTestSamples (sdevel, n_classes, n_samples, n_streams, train_distr, "user");
ModelTools::CreateTestSamples (stest, 1, n_samples * n_classes, n_streams, test_distr, "user");
ssi_char_t string[SSI_MAX_CHAR];
for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
ssi_sprint (string, "class%02d", n_class);
stest.addClassName (string);
}
// train svm
{
SVM *model = ssi_create(SVM, 0, true);
model->getOptions()->seed = 1234;
Trainer trainer(model);
trainer.train(strain);
trainer.save("svm");
}
// evaluation
{
Trainer trainer;
Trainer::Load(trainer, "svm");
Evaluation eval;
eval.eval(&trainer, sdevel);
eval.print();
trainer.cluster(stest);
ModelTools::PlotSamples(stest, "svm (internal normalization)", ssi_rect(650, 0, 400, 400));
}
// train knn
{
KNearestNeighbors *model = ssi_create(KNearestNeighbors, 0, true);
model->getOptions()->k = 5;
//model->getOptions()->distsum = true;
Trainer trainer (model);
trainer.train (strain);
trainer.save ("knn");
}
// evaluation
{
Trainer trainer;
Trainer::Load (trainer, "knn");
Evaluation eval;
eval.eval (&trainer, sdevel);
eval.print ();
trainer.cluster (stest);
ModelTools::PlotSamples(stest, "knn", ssi_rect(650, 0, 400, 400));
}
// train naive bayes
{
NaiveBayes *model = ssi_create(NaiveBayes, 0, true);
model->getOptions()->log = true;
Trainer trainer (model);
trainer.train (strain);
trainer.save ("bayes");
}
// evaluation
{
Trainer trainer;
Trainer::Load (trainer, "bayes");
Evaluation eval;
eval.eval (&trainer, sdevel);
eval.print ();
trainer.cluster (stest);
ModelTools::PlotSamples(stest, "bayes", ssi_rect(650, 0, 400, 400));
}
// training
{
LDA *model = ssi_create(LDA, "lda", true);
Trainer trainer (model);
trainer.train (strain);
model->print();
trainer.save ("lda");
}
// evaluation
{
Trainer trainer;
Trainer::Load (trainer, "lda");
Evaluation eval;
//.........这里部分代码省略.........
示例15: main
int main(int argc, char **argv) {
string nameImage(argv[1]);
//Parameters of the simulation for the drive dataset
bool includeOddOrders = false;
bool includeEvenOrders = true;
bool includeOrder0 = false;
float sigmaImgs = 2.0;
string directory = getDirectoryFromPath(nameImage);
string xFile =
"/home/ggonzale/mount/cvlabfiler/drive/training/svm_1.000e+01_1.000e-02.svm";
double sk = 1e-02;
double C = 10;
/*
//Paramters for road images
bool includeOddOrders = true;
bool includeEvenOrders = true;
bool includeOrder0 = false;
string directory = getDirectoryFromPath(nameImage);
float sigmaImgs = 3.0;
string xFile =
"/home/ggonzale/mount/cvlabfiler/roads/training/svm_1.000e+01_1.000e+01.svm";
double sk = 1e+01;
double C = 10;
*/
string imageName =
nameImage;
string outputName =
directory + "/out.jpg";
string imageThetaN =
directory + "/theta.jpg";
if(!fileExists(imageThetaN)){
Image<float>* image = new Image<float>(imageName);
// image->computeHessian(sigmaImgs, directory+"/l1.jpg", directory + "/l2.jpg",
// true, directory + "theta.jpg");
}
string imageHessianN =
directory + "/l1.jpg";
double sigma = 1.0/(sk*sk);
SteerableFilter2DM* stf =
new SteerableFilter2DM(imageName, 4, sigmaImgs, outputName,
includeOddOrders, includeEvenOrders, includeOrder0);
stf->result->put_all(0.0);
// int xInit = 575;
//int yInit = 525;
//int xEnd = 650;
//int yEnd = 549;
// int xInit = 489;
// int yInit = 509;
// int xEnd = 769;
// int yEnd = 765;
int xInit = 0;
int yInit = 0;
int xEnd = stf->image->width;
int yEnd = stf->image->height;
Image< float >* theta = new Image<float>(imageThetaN);
Image< float >* hessian = new Image<float>(imageHessianN);
int nSupportVectors = 0;
int dimensionOfSupportVectors = 0;
Allocator *allocator = new Allocator;
SVM *svm = NULL;
Kernel *kernel = NULL;
kernel = new(allocator) GaussianKernel((double)sigma);
svm = new(allocator) SVMClassification(kernel);
DiskXFile* model = new(allocator) DiskXFile(xFile.c_str(),"r");
svm->loadXFile(model);
svm->setROption("C", C);
svm->setROption("cache size", 100);
nSupportVectors = svm->n_support_vectors;
dimensionOfSupportVectors = svm->sv_sequences[0]->frame_size;
vector< vector< double > > svectors =
allocateMatrix(nSupportVectors, dimensionOfSupportVectors);
vector< double > alphas(nSupportVectors);
for(int i = 0; i < nSupportVectors; i++){
alphas[i] = svm->sv_alpha[i];
for(int j = 0; j < dimensionOfSupportVectors; j++){
svectors[i][j] = svm->sv_sequences[i]->frames[0][j];
}
}
//saveMatrix(svectors, "supportVectors.txt");
//.........这里部分代码省略.........