This article collects typical usage examples of the C++ method CvRTrees::get_tree_count. If you are wondering what CvRTrees::get_tree_count does, how to use it, or what real calls to it look like, the curated code examples below should help. You can also explore further usage examples of its containing class, CvRTrees.
The following presents 3 code examples of CvRTrees::get_tree_count, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps surface better C++ code examples.
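Before the full examples, a minimal sketch of the method in isolation (assuming the legacy OpenCV 2.x C++ API from opencv2/ml/ml.hpp; "forest_model.xml" is a placeholder path). get_tree_count() returns the number of trees in a trained or loaded forest, and returns 0 for an empty forest, which makes it a convenient load-success check:

#include <opencv2/ml/ml.hpp>
#include <cstdio>

int main()
{
    CvRTrees forest;
    // Load a previously saved model; the path here is hypothetical.
    forest.load( "forest_model.xml" );
    if( forest.get_tree_count() == 0 )
    {
        printf( "Model failed to load or contains no trees.\n" );
        return -1;
    }
    printf( "Forest has %d trees.\n", forest.get_tree_count() );
    return 0;
}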
Example 1: mexFunction
/* Examines the values at each leaf node in order to see what the
   distribution of the data we put in is doing */
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) {
    ASSERT_NUM_RHS_ARGS_EQUALS(1);

    const mxArray* forest_ptr = prhs[0];
    ASSERT_IS_POINTER(forest_ptr);
    CvRTrees *forest = (CvRTrees *) unpack_pointer(forest_ptr);

    // We return a cell array with one cell per tree, so we need this number.
    int num_trees = forest->get_tree_count();
    mexPrintf("Loaded forest of %d trees, retrieving leaf node values.\n", num_trees);

    mxArray *output_cell_array = mxCreateCellMatrix(1, num_trees);
    ASSERT_NON_NULL(output_cell_array);

    for (int t = 0; t < num_trees; t++) {
        // Convert each tree into a MATLAB struct and store it in the cell array.
        mxSetCell(output_cell_array, t, make_matlab_tree_struct(forest->get_tree(t)));
    }

    plhs[0] = output_cell_array;
}
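The ASSERT_* macros, unpack_pointer, and make_matlab_tree_struct are project-specific helpers that are not part of the MEX API and are not shown in this excerpt. As a rough sketch of the pointer-passing convention such wrappers typically use (an assumption, not the project's actual code), the MATLAB side holds the CvRTrees* as a uint64 scalar:

#include "mex.h"
#include <stdint.h>

// Hypothetical sketch: recover a C++ pointer stored in a scalar uint64 mxArray.
static void* unpack_pointer(const mxArray* arr)
{
    if (!mxIsUint64(arr) || mxGetNumberOfElements(arr) != 1)
        mexErrMsgTxt("Expected a scalar uint64 holding a pointer.");
    return (void*)(uintptr_t)(*(uint64_t*)mxGetData(arr));
}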
Example 2: build_rtrees_classifier
static
int build_rtrees_classifier( char* data_filename,
    char* filename_to_save, char* filename_to_load )
{
    CvMat* data = 0;
    CvMat* responses = 0;
    CvMat* var_type = 0;
    CvMat* sample_idx = 0;
    int ok = read_num_class_data( data_filename, 16, &data, &responses );
    int nsamples_all = 0, ntrain_samples = 0;
    int i = 0;
    double train_hr = 0, test_hr = 0;
    CvRTrees forest;
    CvMat* var_importance = 0;

    if( !ok )
    {
        printf( "Could not read the database %s\n", data_filename );
        return -1;
    }
    printf( "The database %s is loaded.\n", data_filename );
    nsamples_all = data->rows;
    ntrain_samples = (int)(nsamples_all*0.8);

    // Create or load Random Trees classifier
    if( filename_to_load )
    {
        // load classifier from the specified file
        forest.load( filename_to_load );
        ntrain_samples = 0;
        if( forest.get_tree_count() == 0 )
        {
            printf( "Could not read the classifier %s\n", filename_to_load );
            return -1;
        }
        printf( "The classifier %s is loaded.\n", filename_to_load );
    }
    else
    {
        // create classifier by using <data> and <responses>
        printf( "Training the classifier ...\n");

        // 1. create type mask
        var_type = cvCreateMat( data->cols + 1, 1, CV_8U );
        cvSet( var_type, cvScalarAll(CV_VAR_ORDERED) );
        cvSetReal1D( var_type, data->cols, CV_VAR_CATEGORICAL );

        // 2. create sample_idx: mark the first ntrain_samples columns as
        // training samples and the rest as test samples
        sample_idx = cvCreateMat( 1, nsamples_all, CV_8UC1 );
        {
            CvMat mat;
            cvGetCols( sample_idx, &mat, 0, ntrain_samples );
            cvSet( &mat, cvRealScalar(1) );
            cvGetCols( sample_idx, &mat, ntrain_samples, nsamples_all );
            cvSetZero( &mat );
        }

        // 3. train classifier (see the annotated CvRTParams breakdown below)
        forest.train( data, CV_ROW_SAMPLE, responses, 0, sample_idx, var_type, 0,
            CvRTParams(10,10,0,false,15,0,true,4,100,0.01f,CV_TERMCRIT_ITER));
        printf( "\n");
    }

    // compute prediction error on train and test data
    for( i = 0; i < nsamples_all; i++ )
    {
        double r;
        CvMat sample;
        cvGetRow( data, &sample, i );

        r = forest.predict( &sample );
        r = fabs((double)r - responses->data.fl[i]) <= FLT_EPSILON ? 1 : 0;

        if( i < ntrain_samples )
            train_hr += r;
        else
            test_hr += r;
    }
    test_hr /= (double)(nsamples_all-ntrain_samples);
    train_hr /= (double)ntrain_samples;
    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
            train_hr*100., test_hr*100. );

    printf( "Number of trees: %d\n", forest.get_tree_count() );

    // Print variable importance
    var_importance = (CvMat*)forest.get_var_importance();
    if( var_importance )
    {
        double rt_imp_sum = cvSum( var_importance ).val[0];
        printf("var#\timportance (in %%):\n");
        for( i = 0; i < var_importance->cols; i++ )
            printf( "%-2d\t%-4.1f\n", i,
                    100.f*var_importance->data.fl[i]/rt_imp_sum);
    }
//......... some code omitted here .........
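The positional CvRTParams constructor call above is hard to read. Written out with the parameter names of the OpenCV 2.x CvRTParams constructor, the same construction corresponds to this reference sketch:

CvRTParams params(
    10,               // max_depth: maximum depth of each tree
    10,               // min_sample_count: minimum samples required at a leaf
    0,                // regression_accuracy: unused for classification
    false,            // use_surrogates: no surrogate splits
    15,               // max_categories: cluster cap for categorical variables
    0,                // priors: no class priors
    true,             // calc_var_importance: enables get_var_importance()
    4,                // nactive_vars: size of the random feature subset per split
    100,              // max_num_of_trees_in_the_forest
    0.01f,            // forest_accuracy: OOB error target (ignored with CV_TERMCRIT_ITER)
    CV_TERMCRIT_ITER  // termcrit_type: stop after the maximum tree count
);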
Example 3: train
int RandomTrees::train(const char* samples_filename, const char* model_filename, const double ratio, double &train_error, double &test_error)
{
    CvMat* data = 0;
    CvMat* responses = 0;
    CvMat* var_type = 0;
    CvMat* sample_idx = 0;

    // Use the common heuristic of sqrt(#features) active variables per split.
    this->tree_parameters_.nactive_vars = (int)sqrt(this->number_of_features_);

    int ok = read_num_class_data( samples_filename, this->number_of_features_, &data, &responses );
    int nsamples_all = 0, ntrain_samples = 0;
    int i = 0;
    double train_hr = 0, test_hr = 0;
    CvRTrees forest;
    CvMat* var_importance = 0;

    if( !ok )
    {
        cout << "Could not read the samples in " << samples_filename << endl;
        return -1;
    }
    cout << "The sample file " << samples_filename << " is loaded." << endl;
    nsamples_all = data->rows;
    ntrain_samples = (int)(nsamples_all * ratio);

    // create classifier by using <data> and <responses>
    cout << "Training the classifier ..." << endl;

    // 1. create type mask
    var_type = cvCreateMat( data->cols + 1, 1, CV_8U );
    cvSet( var_type, cvScalarAll(CV_VAR_ORDERED) );
    cvSetReal1D( var_type, data->cols, CV_VAR_CATEGORICAL );

    // 2. create sample_idx: the first ntrain_samples samples are used for
    // training, the remainder for testing
    sample_idx = cvCreateMat( 1, nsamples_all, CV_8UC1 );
    {
        CvMat mat;
        cvGetCols( sample_idx, &mat, 0, ntrain_samples );
        cvSet( &mat, cvRealScalar(1) );
        cvGetCols( sample_idx, &mat, ntrain_samples, nsamples_all );
        cvSetZero( &mat );
    }

    // 3. train classifier
    forest.train( data, CV_ROW_SAMPLE, responses, 0, sample_idx, var_type, 0, this->tree_parameters_);
    cout << endl;

    // compute prediction error on train and test data
    for( i = 0; i < nsamples_all; i++ )
    {
        double r;
        CvMat sample;
        cvGetRow( data, &sample, i );

        r = forest.predict( &sample );
        r = fabs((double)r - responses->data.fl[i]) <= FLT_EPSILON ? 1 : 0;

        if( i < ntrain_samples )
            train_hr += r;
        else
            test_hr += r;
    }
    test_hr /= (double)(nsamples_all-ntrain_samples);
    train_hr /= (double)ntrain_samples;
    train_error = 1 - train_hr;
    test_error = 1 - test_hr;
    cout << "Recognition rate: train = " << train_hr*100 << ", test = " << test_hr*100 << endl;
    cout << "Number of trees: " << forest.get_tree_count() << endl;

    // Print variable importance
    var_importance = (CvMat*)forest.get_var_importance();
    if( var_importance )
    {
        double rt_imp_sum = cvSum( var_importance ).val[0];
        printf("var#\timportance (in %%):\n");
        for( i = 0; i < var_importance->cols; i++ )
            printf( "%-2d\t%-4.1f\n", i, 100.f*var_importance->data.fl[i]/rt_imp_sum);
    }

    // Save Random Trees classifier to file if needed
    if( model_filename )
        forest.save( model_filename );

    //cvReleaseMat( &var_importance ); //causes a segmentation fault
    cvReleaseMat( &sample_idx );
    cvReleaseMat( &var_type );
    cvReleaseMat( &data );
    cvReleaseMat( &responses );
    return 0;
}
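The RandomTrees class declaration (including the tree_parameters_ and number_of_features_ members) is not shown in this excerpt, so the construction below is hypothetical; it only illustrates how the method is meant to be called, here with an 80/20 train/test split and placeholder file names:

// Hypothetical call site; the RandomTrees construction is an assumption.
RandomTrees rt;
double train_error = 0, test_error = 0;
if( rt.train( "samples.data", "forest_model.xml", 0.8, train_error, test_error ) == 0 )
{
    cout << "train error: " << train_error
         << ", test error: " << test_error << endl;
}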