Tags: icvGetUsedValues, stage classifier, Haar feature values
The code discussed here lives in the OpenCV source tree, in ........\opencv\sources\apps\haartraining\cvhaartraining.cpp. The function, with the comments translated, is listed below:
/*
 * icvGetUsedValues
 * Purpose: compute the feature values of the training samples for the features
 *          actually used by a given stage classifier.
 */
static
CvMat* icvGetUsedValues( CvHaarTrainingData* training_data, /* all training data: integral images, tilted integral images, etc. */
                         int start, int num,                /* compute feature values for num samples, starting at sample start */
                         CvIntHaarFeatures* haar_features,  /* all internal Haar features */
                         CvStageHaarClassifier* stage )     /* the stage classifier */
{
    CvMat* ptr = NULL;
    CvMat* feature_idx = NULL;

    CV_FUNCNAME( "icvGetUsedValues" );

    __BEGIN__;

    int num_splits;
    int i, j;
    int r;
    int total, last;

    num_splits = icvNumSplits( stage ); /* number of CART classifiers in the stage classifier */

    CV_CALL( feature_idx = cvCreateMat( 1, num_splits, CV_32SC1 ) );

    total = 0;
    for( i = 0; i < stage->count; i++ ) /* iterate over the CART classifiers of the stage */
    {
        CvCARTHaarClassifier* cart;

        cart = (CvCARTHaarClassifier*) stage->classifier[i];
        for( j = 0; j < cart->count; j++ ) /* iterate over the nodes of each CART */
        {
            /* store the feature index of every node of every CART of the stage in feature_idx */
            feature_idx->data.i[total++] = cart->compidx[j];
        }
    }
    icvSort_32s( feature_idx->data.i, total, 0 ); /* sort all feature indices */

    last = 0;
    for( i = 1; i < total; i++ )
    {
        if( feature_idx->data.i[i] != feature_idx->data.i[last] )
        {
            feature_idx->data.i[++last] = feature_idx->data.i[i]; /* drop duplicated indices */
        }
    }
    total = last + 1; /* total number of features after removing duplicates */
    CV_CALL( ptr = cvCreateMat( num, total, CV_32FC1 ) );

#ifdef CV_OPENMP
#pragma omp parallel for
#endif
    for( r = start; r < start + num; r++ )
    {
        int c;

        for( c = 0; c < total; c++ )
        {
            float val, normfactor;
            int fnum;

            fnum = feature_idx->data.i[c]; /* take the feature index */

            /* evaluate the fast Haar feature from the feature index,
               the integral image and the tilted integral image */
            val = cvEvalFastHaarFeature( haar_features->fastfeature + fnum,
                    (sum_type*) (training_data->sum.data.ptr + r * training_data->sum.step),
                    (sum_type*) (training_data->tilted.data.ptr + r * training_data->tilted.step) );
            normfactor = training_data->normfactor.data.fl[r];
            val = ( normfactor == 0.0F ) ? 0.0F : (val / normfactor); /* normalize the feature value */
            CV_MAT_ELEM( *ptr, float, r - start, c ) = val; /* store the normalized value in ptr */
        }
    }

    __END__;

    cvReleaseMat( &feature_idx );

    return ptr; /* return the result */
}
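The middle of the function collects the feature indices used by every CART node, sorts them, and then strips adjacent duplicates; this is a generic sort-and-deduplicate pattern. Below is a minimal standalone sketch of that same pattern in plain C. It is only an illustration: the comp_int comparator and qsort stand in for OpenCV's internal icvSort_32s, and the sample index values are made up.

#include <stdio.h>
#include <stdlib.h>

/* Comparator for qsort; plays the role of the internal icvSort_32s helper. */
static int comp_int( const void* a, const void* b )
{
    return *(const int*) a - *(const int*) b;
}

int main( void )
{
    /* Hypothetical feature indices collected from CART nodes, with repeats. */
    int idx[] = { 7, 2, 7, 5, 2, 9, 5 };
    int total = (int) (sizeof(idx) / sizeof(idx[0]));
    int i, last;

    qsort( idx, total, sizeof(int), comp_int ); /* sort the indices */

    last = 0;                                   /* keep the first occurrence */
    for( i = 1; i < total; i++ )
    {
        if( idx[i] != idx[last] )
        {
            idx[++last] = idx[i];               /* drop adjacent repeats */
        }
    }
    total = last + 1;                           /* number of unique indices */

    for( i = 0; i < total; i++ )
        printf( "%d ", idx[i] );                /* prints: 2 5 7 9 */
    printf( "\n" );
    return 0;
}

After this step the function only has to evaluate each unique Haar feature once per sample, which is exactly why the deduplicated count (total) becomes the column count of the returned matrix.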
Original article: http://blog.csdn.net/ding977921830/article/details/46969195