This page collects typical usage examples of the C++ method CvDTree::calc_error. If you are unsure what CvDTree::calc_error does, how to call it, or what real calls look like, the selected examples below should help; you can also read up on the enclosing class, CvDTree, for more context.
A total of 2 code examples of CvDTree::calc_error are shown below.
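Before the full examples, here is a minimal sketch of the calling pattern (the file name and response-column index are placeholders, not taken from the examples below). In the legacy OpenCV 2.x ML API, CvDTree::calc_error takes a CvMLData object plus CV_TRAIN_ERROR or CV_TEST_ERROR and returns the error over the corresponding subset of the train/test split; for classification data this is the percentage of misclassified samples.

#include <cstdio>
#include <opencv2/ml/ml.hpp>   // legacy OpenCV 2.x ML API

int main()
{
    CvMLData data;
    if( data.read_csv( "dataset.csv" ) != 0 )       // placeholder file name
        return 1;
    data.set_response_idx( 0 );                     // placeholder: labels in column 0
    data.change_var_type( 0, CV_VAR_CATEGORICAL );  // classification target
    CvTrainTestSplit spl( 300 );                    // 300 training samples, rest for testing
    data.set_train_test_split( &spl );

    CvDTree dtree;
    dtree.train( &data, CvDTreeParams() );          // default decision-tree parameters
    printf( "train error: %f%%\n", dtree.calc_error( &data, CV_TRAIN_ERROR ) );
    printf( "test  error: %f%%\n", dtree.calc_error( &data, CV_TEST_ERROR ) );
    return 0;
}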
Example 1: main
#include <cstdio>
#include <opencv2/ml/ml.hpp>   // legacy OpenCV 2.x ML module (CvDTree, CvBoost, CvRTrees, CvERTrees)

int main()
{
    const int train_sample_count = 300;

//#define LEPIOTA
#ifdef LEPIOTA
    const char* filename = "../../../OpenCV_SVN/samples/c/agaricus-lepiota.data";
#else
    const char* filename = "../../../OpenCV_SVN/samples/c/waveform.data";
#endif

    CvDTree dtree;
    CvBoost boost;
    CvRTrees rtrees;
    CvERTrees ertrees;
    CvMLData data;
    CvTrainTestSplit spl( train_sample_count );

    data.read_csv( filename );
#ifdef LEPIOTA
    data.set_response_idx( 0 );                      // class label is in the first column
#else
    data.set_response_idx( 21 );                     // class label is in the last column
    data.change_var_type( 21, CV_VAR_CATEGORICAL );  // treat it as a class, not a number
#endif
    data.set_train_test_split( &spl );

    printf("======DTREE=====\n");
    // max_depth=10, min_sample_count=2, no surrogates, no pruning (cv_folds=0)
    dtree.train( &data, CvDTreeParams( 10, 2, 0, false, 16, 0, false, false, 0 ));
    print_result( dtree.calc_error( &data, CV_TRAIN_ERROR ), dtree.calc_error( &data, CV_TEST_ERROR ), dtree.get_var_importance() );

#ifdef LEPIOTA
    printf("======BOOST=====\n");
    boost.train( &data, CvBoostParams( CvBoost::DISCRETE, 100, 0.95, 2, false, 0 ));
    print_result( boost.calc_error( &data, CV_TRAIN_ERROR ), boost.calc_error( &data, CV_TEST_ERROR ), 0 );
#endif

    printf("======RTREES=====\n");
    // forest of up to 100 trees; calc_var_importance=true
    rtrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR ), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );

    printf("======ERTREES=====\n");
    ertrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR ), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );

    return 0;
}
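Both examples call a print_result helper that this page does not show; it is defined elsewhere in the OpenCV sample. A minimal sketch that is compatible with the calls above might look like the following (the exact output formatting is an assumption, not the original helper):

#include <cstdio>
#include <opencv2/core/core_c.h>

// Hypothetical reconstruction: print the train/test errors returned by
// calc_error() and, when provided, the per-variable importance vector.
void print_result( float train_err, float test_err, const CvMat* var_imp )
{
    printf( "train error: %f%%\n", train_err );
    printf( "test  error: %f%%\n\n", test_err );
    if( var_imp )
    {
        printf( "variable importance:\n" );
        for( int i = 0; i < var_imp->cols; i++ )
            printf( "  var %d: %f\n", i, cvGetReal1D( var_imp, i ) );  // works for CV_32F and CV_64F
    }
}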
Example 2: main
#include <cassert>
#include <cfloat>
#include <cstdio>
#include <cstdlib>
#include <opencv2/ml/ml.hpp>   // legacy OpenCV 2.x ML module

int main()
{
    const int train_sample_count = 300;
    bool is_regression = false;

    const char* filename = "data/waveform.data";
    int response_idx = 21;

    CvMLData data;
    CvTrainTestSplit spl( train_sample_count );
    if( data.read_csv( filename ) != 0 )
    {
        printf( "couldn't read %s\n", filename );
        exit( 1 );   // signal failure instead of success
    }
    data.set_response_idx( response_idx );
    data.change_var_type( response_idx, CV_VAR_CATEGORICAL );   // classification target
    data.set_train_test_split( &spl );

    const CvMat* values = data.get_values();
    const CvMat* response = data.get_responses();
    const CvMat* missing = data.get_missing();
    const CvMat* var_types = data.get_var_types();
    const CvMat* train_sidx = data.get_train_sample_idx();
    const CvMat* var_idx = data.get_var_idx();

    // cv_preprocess_categories() is a helper defined elsewhere in this sample;
    // it maps the raw class labels onto contiguous indices 0..num_classes-1.
    CvMat* response_map;
    CvMat* ordered_response = cv_preprocess_categories( response, var_idx, response->rows, &response_map, NULL );
    int num_classes = response_map->cols;

    printf("======DTREE=====\n");
    CvDTree dtree;
    CvDTreeParams cvd_params( 10, 1, 0, false, 16, 0, false, false, 0 );
    dtree.train( &data, cvd_params );
    print_result( dtree.calc_error( &data, CV_TRAIN_ERROR ), dtree.calc_error( &data, CV_TEST_ERROR ), dtree.get_var_importance() );

#if 0
    /* boosted trees are only implemented for two classes */
    printf("======BOOST=====\n");
    CvBoost boost;
    boost.train( &data, CvBoostParams( CvBoost::DISCRETE, 100, 0.95, 2, false, 0 ));
    print_result( boost.calc_error( &data, CV_TRAIN_ERROR ), boost.calc_error( &data, CV_TEST_ERROR ), 0 );
#endif

    printf("======RTREES=====\n");
    CvRTrees rtrees;
    rtrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR ), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );

    printf("======ERTREES=====\n");
    CvERTrees ertrees;
    ertrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR ), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );

    printf("======GBTREES=====\n");
    CvGBTrees gbtrees;
    CvGBTreesParams gbparams;
    gbparams.loss_function_type = CvGBTrees::DEVIANCE_LOSS;   // classification, not regression
    gbtrees.train( &data, gbparams );
    //gbt_print_error( &gbtrees, values, response, response_idx, train_sidx );
    print_result( gbtrees.calc_error( &data, CV_TRAIN_ERROR ), gbtrees.calc_error( &data, CV_TEST_ERROR ), 0 );

    printf("======KNEAREST=====\n");
    CvKNearest knearest;
    // bool CvKNearest::train( const Mat& _train_data, const Mat& _responses,
    //                         const Mat& _sample_idx, bool _is_regression,
    //                         int _max_k, bool _update_base )
    bool is_classifier = var_types->data.ptr[var_types->cols-1] == CV_VAR_CATEGORICAL;
    assert( is_classifier );
    int max_k = 10;
    knearest.train( values, response, train_sidx, is_regression, max_k, false );
    CvMat* new_response = cvCreateMat( response->rows, 1, values->type );
    knearest.find_nearest( values, max_k, new_response, 0, 0, 0 );
    // knearest_calc_error() is another helper from this sample (see the sketch below)
    print_result( knearest_calc_error( values, response, new_response, train_sidx, is_regression, CV_TRAIN_ERROR ),
                  knearest_calc_error( values, response, new_response, train_sidx, is_regression, CV_TEST_ERROR ), 0 );

    printf("======== RBF SVM =======\n");
    // CvMySVM is a thin CvSVM subclass defined elsewhere in this sample
    CvMySVM svm1;
    CvSVMParams params1 = CvSVMParams( CvSVM::C_SVC, CvSVM::RBF,
                                       /*degree*/0, /*gamma*/1, /*coef0*/0, /*C*/1,
                                       /*nu*/0, /*p*/0, /*class_weights*/0,
                                       cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, FLT_EPSILON) );
    //svm1.train( values, response, train_sidx, var_idx, params1 );
    svm1.train_auto( values, response, var_idx, train_sidx, params1 );   // cross-validated parameter search
    svm_print_error( &svm1, values, response, response_idx, train_sidx );

    printf("======== Linear SVM =======\n");
    CvMySVM svm2;
    CvSVMParams params2 = CvSVMParams( CvSVM::C_SVC, CvSVM::LINEAR,
                                       /*degree*/0, /*gamma*/1, /*coef0*/0, /*C*/1,
                                       /*nu*/0, /*p*/0, /*class_weights*/0,
// ......... the remainder of this example is omitted .........
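Example 2 also relies on helpers that the page omits: cv_preprocess_categories, knearest_calc_error, svm_print_error, and the CvMySVM class. As an illustration, here is a hypothetical sketch of knearest_calc_error consistent with the calls above, under the assumption that train_sidx lists the training row indices and every other row belongs to the test set:

#include <vector>
#include <opencv2/ml/ml.hpp>   // CvMat, CV_TRAIN_ERROR / CV_TEST_ERROR

// Hypothetical reconstruction: misclassification rate (in %) of the predicted
// labels over either the training rows (CV_TRAIN_ERROR) or all remaining rows.
float knearest_calc_error( const CvMat* values, const CvMat* response,
                           const CvMat* predicted, const CvMat* train_sidx,
                           bool is_regression, int type )
{
    (void)is_regression;   // this sketch handles classification only
    std::vector<bool> is_train( values->rows, false );
    for( int i = 0; i < train_sidx->cols; i++ )
        is_train[ train_sidx->data.i[i] ] = true;

    int total = 0, wrong = 0;
    for( int i = 0; i < values->rows; i++ )
    {
        if( is_train[i] != (type == CV_TRAIN_ERROR) )
            continue;
        total++;
        // exact comparison is safe here: class labels are small integers stored in float
        if( cvGetReal1D( response, i ) != cvGetReal1D( predicted, i ) )
            wrong++;
    }
    return total > 0 ? 100.f * wrong / total : 0.f;
}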