// boost.cpp — boosted cascade classifier training support for the OpenCV
// traincascade application (CvCascadeBoostParams / CvCascadeBoostTrainData).
  1. #include "opencv2/core.hpp"
  2. #include "opencv2/core/utility.hpp"
  3. using cv::Size;
  4. using cv::Mat;
  5. using cv::Point;
  6. using cv::FileStorage;
  7. using cv::Rect;
  8. using cv::Ptr;
  9. using cv::FileNode;
  10. using cv::Mat_;
  11. using cv::Range;
  12. using cv::FileNodeIterator;
  13. using cv::ParallelLoopBody;
  14. using cv::Size;
  15. using cv::Mat;
  16. using cv::Point;
  17. using cv::FileStorage;
  18. using cv::Rect;
  19. using cv::Ptr;
  20. using cv::FileNode;
  21. using cv::Mat_;
  22. using cv::Range;
  23. using cv::FileNodeIterator;
  24. using cv::ParallelLoopBody;
  25. #include "boost.h"
  26. #include "cascadeclassifier.h"
  27. #include <queue>
  28. #include "cvconfig.h"
  29. using namespace std;
  30. static inline double
  31. logRatio( double val )
  32. {
  33. const double eps = 1e-5;
  34. val = max( val, eps );
  35. val = min( val, 1. - eps );
  36. return log( val/(1. - val) );
  37. }
  38. template<typename T, typename Idx>
  39. class LessThanIdx
  40. {
  41. public:
  42. LessThanIdx( const T* _arr ) : arr(_arr) {}
  43. bool operator()(Idx a, Idx b) const { return arr[a] < arr[b]; }
  44. const T* arr;
  45. };
  46. static inline int cvAlign( int size, int align )
  47. {
  48. CV_DbgAssert( (align & (align-1)) == 0 && size < INT_MAX );
  49. return (size + align - 1) & -align;
  50. }
// Tolerance for stage-threshold comparisons (used elsewhere in this file).
#define CV_THRESHOLD_EPS (0.00001F)

// Block sizing for the CvMemStorage pools created for tree nodes/splits.
static const int MinBlockSize = 1 << 16;    // minimum storage block: 64 KB
static const int BlockSizeDelta = 1 << 10;  // headroom added to computed sizes: 1 KB
  54. // TODO remove this code duplication with ml/precomp.hpp
  55. static int CV_CDECL icvCmpIntegers( const void* a, const void* b )
  56. {
  57. return *(const int*)a - *(const int*)b;
  58. }
  59. static CvMat* cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, bool check_for_duplicates=false )
  60. {
  61. CvMat* idx = 0;
  62. CV_FUNCNAME( "cvPreprocessIndexArray" );
  63. __CV_BEGIN__;
  64. int i, idx_total, idx_selected = 0, step, type, prev = INT_MIN, is_sorted = 1;
  65. uchar* srcb = 0;
  66. int* srci = 0;
  67. int* dsti;
  68. if( !CV_IS_MAT(idx_arr) )
  69. CV_ERROR( CV_StsBadArg, "Invalid index array" );
  70. if( idx_arr->rows != 1 && idx_arr->cols != 1 )
  71. CV_ERROR( CV_StsBadSize, "the index array must be 1-dimensional" );
  72. idx_total = idx_arr->rows + idx_arr->cols - 1;
  73. srcb = idx_arr->data.ptr;
  74. srci = idx_arr->data.i;
  75. type = CV_MAT_TYPE(idx_arr->type);
  76. step = CV_IS_MAT_CONT(idx_arr->type) ? 1 : idx_arr->step/CV_ELEM_SIZE(type);
  77. switch( type )
  78. {
  79. case CV_8UC1:
  80. case CV_8SC1:
  81. // idx_arr is array of 1's and 0's -
  82. // i.e. it is a mask of the selected components
  83. if( idx_total != data_arr_size )
  84. CV_ERROR( CV_StsUnmatchedSizes,
  85. "Component mask should contain as many elements as the total number of input variables" );
  86. for( i = 0; i < idx_total; i++ )
  87. idx_selected += srcb[i*step] != 0;
  88. if( idx_selected == 0 )
  89. CV_ERROR( CV_StsOutOfRange, "No components/input_variables is selected!" );
  90. break;
  91. case CV_32SC1:
  92. // idx_arr is array of integer indices of selected components
  93. if( idx_total > data_arr_size )
  94. CV_ERROR( CV_StsOutOfRange,
  95. "index array may not contain more elements than the total number of input variables" );
  96. idx_selected = idx_total;
  97. // check if sorted already
  98. for( i = 0; i < idx_total; i++ )
  99. {
  100. int val = srci[i*step];
  101. if( val >= prev )
  102. {
  103. is_sorted = 0;
  104. break;
  105. }
  106. prev = val;
  107. }
  108. break;
  109. default:
  110. CV_ERROR( CV_StsUnsupportedFormat, "Unsupported index array data type "
  111. "(it should be 8uC1, 8sC1 or 32sC1)" );
  112. }
  113. CV_CALL( idx = cvCreateMat( 1, idx_selected, CV_32SC1 ));
  114. dsti = idx->data.i;
  115. if( type < CV_32SC1 )
  116. {
  117. for( i = 0; i < idx_total; i++ )
  118. if( srcb[i*step] )
  119. *dsti++ = i;
  120. }
  121. else
  122. {
  123. for( i = 0; i < idx_total; i++ )
  124. dsti[i] = srci[i*step];
  125. if( !is_sorted )
  126. qsort( dsti, idx_total, sizeof(dsti[0]), icvCmpIntegers );
  127. if( dsti[0] < 0 || dsti[idx_total-1] >= data_arr_size )
  128. CV_ERROR( CV_StsOutOfRange, "the index array elements are out of range" );
  129. if( check_for_duplicates )
  130. {
  131. for( i = 1; i < idx_total; i++ )
  132. if( dsti[i] <= dsti[i-1] )
  133. CV_ERROR( CV_StsBadArg, "There are duplicated index array elements" );
  134. }
  135. }
  136. __CV_END__;
  137. if( cvGetErrStatus() < 0 )
  138. cvReleaseMat( &idx );
  139. return idx;
  140. }
  141. //----------------------------- CascadeBoostParams -------------------------------------------------
  142. CvCascadeBoostParams::CvCascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F )
  143. {
  144. boost_type = CvBoost::GENTLE;
  145. use_surrogates = use_1se_rule = truncate_pruned_tree = false;
  146. }
  147. CvCascadeBoostParams::CvCascadeBoostParams( int _boostType,
  148. float _minHitRate, float _maxFalseAlarm,
  149. double _weightTrimRate, int _maxDepth, int _maxWeakCount ) :
  150. CvBoostParams( _boostType, _maxWeakCount, _weightTrimRate, _maxDepth, false, 0 )
  151. {
  152. boost_type = CvBoost::GENTLE;
  153. minHitRate = _minHitRate;
  154. maxFalseAlarm = _maxFalseAlarm;
  155. use_surrogates = use_1se_rule = truncate_pruned_tree = false;
  156. }
// Serializes the boost stage parameters into an open FileStorage.
void CvCascadeBoostParams::write( FileStorage &fs ) const
{
    // Map the CvBoost type id to its string tag; an unknown id leaves the
    // string empty and trips the assertion below.
    string boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
                          boost_type == CvBoost::REAL ? CC_REAL_BOOST :
                          boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
                          boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : string();
    CV_Assert( !boostTypeStr.empty() );
    fs << CC_BOOST_TYPE << boostTypeStr;
    fs << CC_MINHITRATE << minHitRate;
    fs << CC_MAXFALSEALARM << maxFalseAlarm;
    fs << CC_TRIM_RATE << weight_trim_rate;
    fs << CC_MAX_DEPTH << max_depth;
    fs << CC_WEAK_COUNT << weak_count;
}
// Deserializes boost parameters from a FileStorage node and range-checks
// them. Raises CV_Error on an unknown boost type or out-of-range values;
// returns true on success.
bool CvCascadeBoostParams::read( const FileNode &node )
{
    string boostTypeStr;
    FileNode rnode = node[CC_BOOST_TYPE];
    rnode >> boostTypeStr;
    // Map the string tag back to the CvBoost enum; -1 marks "unknown".
    boost_type = !boostTypeStr.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
                 !boostTypeStr.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
                 !boostTypeStr.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
                 !boostTypeStr.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
    if (boost_type == -1)
        CV_Error( CV_StsBadArg, "unsupported Boost type" );
    node[CC_MINHITRATE] >> minHitRate;
    node[CC_MAXFALSEALARM] >> maxFalseAlarm;
    node[CC_TRIM_RATE] >> weight_trim_rate ;
    node[CC_MAX_DEPTH] >> max_depth ;
    node[CC_WEAK_COUNT] >> weak_count ;
    // Rates must lie in (0, 1]; depth and weak count must be positive.
    if ( minHitRate <= 0 || minHitRate > 1 ||
         maxFalseAlarm <= 0 || maxFalseAlarm > 1 ||
         weight_trim_rate <= 0 || weight_trim_rate > 1 ||
         max_depth <= 0 || weak_count <= 0 )
        CV_Error( CV_StsBadArg, "bad parameters range");
    return true;
}
  194. void CvCascadeBoostParams::printDefaults() const
  195. {
  196. cout << "--boostParams--" << endl;
  197. cout << " [-bt <{" << CC_DISCRETE_BOOST << ", "
  198. << CC_REAL_BOOST << ", "
  199. << CC_LOGIT_BOOST ", "
  200. << CC_GENTLE_BOOST << "(default)}>]" << endl;
  201. cout << " [-minHitRate <min_hit_rate> = " << minHitRate << ">]" << endl;
  202. cout << " [-maxFalseAlarmRate <max_false_alarm_rate = " << maxFalseAlarm << ">]" << endl;
  203. cout << " [-weightTrimRate <weight_trim_rate = " << weight_trim_rate << ">]" << endl;
  204. cout << " [-maxDepth <max_depth_of_weak_tree = " << max_depth << ">]" << endl;
  205. cout << " [-maxWeakCount <max_weak_tree_count = " << weak_count << ">]" << endl;
  206. }
// Dumps the currently configured boost parameter values to stdout.
void CvCascadeBoostParams::printAttrs() const
{
    // An unmapped boost_type yields an empty string and trips the assertion.
    string boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
                          boost_type == CvBoost::REAL ? CC_REAL_BOOST :
                          boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
                          boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : string();
    CV_Assert( !boostTypeStr.empty() );
    cout << "boostType: " << boostTypeStr << endl;
    cout << "minHitRate: " << minHitRate << endl;
    cout << "maxFalseAlarmRate: " << maxFalseAlarm << endl;
    cout << "weightTrimRate: " << weight_trim_rate << endl;
    cout << "maxDepth: " << max_depth << endl;
    cout << "maxWeakCount: " << weak_count << endl;
}
  221. bool CvCascadeBoostParams::scanAttr( const string prmName, const string val)
  222. {
  223. bool res = true;
  224. if( !prmName.compare( "-bt" ) )
  225. {
  226. boost_type = !val.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
  227. !val.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
  228. !val.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
  229. !val.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
  230. if (boost_type == -1)
  231. res = false;
  232. }
  233. else if( !prmName.compare( "-minHitRate" ) )
  234. {
  235. minHitRate = (float) atof( val.c_str() );
  236. }
  237. else if( !prmName.compare( "-maxFalseAlarmRate" ) )
  238. {
  239. maxFalseAlarm = (float) atof( val.c_str() );
  240. }
  241. else if( !prmName.compare( "-weightTrimRate" ) )
  242. {
  243. weight_trim_rate = (float) atof( val.c_str() );
  244. }
  245. else if( !prmName.compare( "-maxDepth" ) )
  246. {
  247. max_depth = atoi( val.c_str() );
  248. }
  249. else if( !prmName.compare( "-maxWeakCount" ) )
  250. {
  251. weak_count = atoi( val.c_str() );
  252. }
  253. else
  254. res = false;
  255. return res;
  256. }
// Builds a new root node covering the sample subset described by
// _subsample_idx (either a 0/1 mask over all samples or a list of sample
// indices; duplicates are allowed). When the subset is exactly the full
// sample range in order (or _subsample_idx is NULL), the existing data_root
// is shallow-copied instead of rebuilding the per-variable buffers.
CvDTreeNode* CvCascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx )
{
    CvDTreeNode* root = 0;
    CvMat* isubsample_idx = 0;
    CvMat* subsample_co = 0;

    bool isMakeRootCopy = true;

    if( !data_root )
        CV_Error( CV_StsError, "No training data has been set" );

    if( _subsample_idx )
    {
        // Canonicalize mask/index input into a sorted CV_32SC1 index list.
        CV_Assert( (isubsample_idx = cvPreprocessIndexArray( _subsample_idx, sample_count )) != 0 );

        // Copying is only valid when the subset is exactly 0..sample_count-1.
        if( isubsample_idx->cols + isubsample_idx->rows - 1 == sample_count )
        {
            const int* sidx = isubsample_idx->data.i;
            for( int i = 0; i < sample_count; i++ )
            {
                if( sidx[i] != i )
                {
                    isMakeRootCopy = false;
                    break;
                }
            }
        }
        else
            isMakeRootCopy = false;
    }

    if( isMakeRootCopy )
    {
        // Make a copy of the root node: copy all fields from data_root, but
        // keep the freshly allocated node's own num_valid/cv_* arrays (saved
        // in `temp` before the struct assignment clobbers them).
        CvDTreeNode temp;
        int i;
        root = new_node( 0, 1, 0, 0 );
        temp = *root;
        *root = *data_root;
        root->num_valid = temp.num_valid;
        if( root->num_valid )
        {
            for( i = 0; i < var_count; i++ )
                root->num_valid[i] = data_root->num_valid[i];
        }
        root->cv_Tn = temp.cv_Tn;
        root->cv_node_risk = temp.cv_node_risk;
        root->cv_node_error = temp.cv_node_error;
    }
    else
    {
        int* sidx = isubsample_idx->data.i;
        // co - array of count/offset pairs (to handle duplicated values in _subsample_idx)
        int* co, cur_ofs = 0;
        int workVarCount = get_work_var_count();
        int count = isubsample_idx->rows + isubsample_idx->cols - 1;

        root = new_node( 0, count, 1, 0 );
        CV_Assert( (subsample_co = cvCreateMat( 1, sample_count*2, CV_32SC1 )) != 0);
        cvZero( subsample_co );
        co = subsample_co->data.i;
        // Pass 1: multiplicity of each original sample in the subset.
        for( int i = 0; i < count; i++ )
            co[sidx[i]*2]++;
        // Pass 2: prefix-sum start offsets; -1 marks unselected samples.
        for( int i = 0; i < sample_count; i++ )
        {
            if( co[i*2] )
            {
                co[i*2+1] = cur_ofs;
                cur_ofs += co[i*2];
            }
            else
                co[i*2+1] = -1;
        }

        // Shared scratch: int idx column + float value column + int sample
        // indices column, all of length sample_count.
        cv::AutoBuffer<uchar> inn_buf(sample_count*(2*sizeof(int) + sizeof(float)));
        // Subsample ordered variables: remap each precalculated sorted-index
        // column onto the new (possibly duplicated) sample numbering.
        for( int vi = 0; vi < numPrecalcIdx; vi++ )
        {
            int ci = get_var_type(vi);
            CV_Assert( ci < 0 ); // must be an ordered variable

            int *src_idx_buf = (int*)inn_buf.data();
            float *src_val_buf = (float*)(src_idx_buf + sample_count);
            int* sample_indices_buf = (int*)(src_val_buf + sample_count);
            const int* src_idx = 0;
            const float* src_val = 0;
            get_ord_var_data( data_root, vi, src_val_buf, src_idx_buf, &src_val, &src_idx, sample_indices_buf );

            int j = 0, idx, count_i;
            int num_valid = data_root->get_num_valid(vi);
            CV_Assert( num_valid == sample_count );

            if (is_buf_16u)
            {
                // NOTE(review): this branch offsets by data_root->offset while
                // the 32-bit branch below uses root->offset — verify this
                // asymmetry is intentional.
                unsigned short* udst_idx = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
                    (size_t)vi*sample_count + data_root->offset);
                for( int i = 0; i < num_valid; i++ )
                {
                    idx = src_idx[i];
                    count_i = co[idx*2];
                    if( count_i )
                        for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
                            udst_idx[j] = (unsigned short)cur_ofs;
                }
            }
            else
            {
                int* idst_idx = buf->data.i + root->buf_idx*get_length_subbuf() +
                    (size_t)vi*sample_count + root->offset;
                for( int i = 0; i < num_valid; i++ )
                {
                    idx = src_idx[i];
                    count_i = co[idx*2];
                    if( count_i )
                        for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
                            idst_idx[j] = cur_ofs;
                }
            }
        }

        // Subsample cv_labels (stored as the last work variable).
        const int* src_lbls = get_cv_labels(data_root, (int*)inn_buf.data());
        if (is_buf_16u)
        {
            unsigned short* udst = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
                (size_t)(workVarCount-1)*sample_count + root->offset);
            for( int i = 0; i < count; i++ )
                udst[i] = (unsigned short)src_lbls[sidx[i]];
        }
        else
        {
            int* idst = buf->data.i + root->buf_idx*get_length_subbuf() +
                (size_t)(workVarCount-1)*sample_count + root->offset;
            for( int i = 0; i < count; i++ )
                idst[i] = src_lbls[sidx[i]];
        }

        // Subsample sample_indices (stored right after the work variables).
        const int* sample_idx_src = get_sample_indices(data_root, (int*)inn_buf.data());
        if (is_buf_16u)
        {
            unsigned short* sample_idx_dst = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
                (size_t)workVarCount*sample_count + root->offset);
            for( int i = 0; i < count; i++ )
                sample_idx_dst[i] = (unsigned short)sample_idx_src[sidx[i]];
        }
        else
        {
            int* sample_idx_dst = buf->data.i + root->buf_idx*get_length_subbuf() +
                (size_t)workVarCount*sample_count + root->offset;
            for( int i = 0; i < count; i++ )
                sample_idx_dst[i] = sample_idx_src[sidx[i]];
        }

        // Every variable is fully populated for the new root.
        for( int vi = 0; vi < var_count; vi++ )
            root->set_num_valid(vi, count);
    }

    cvReleaseMat( &isubsample_idx );
    cvReleaseMat( &subsample_co );

    return root;
}
  405. //---------------------------- CascadeBoostTrainData -----------------------------
// Lightweight constructor: wires up the feature evaluator, the variable-type
// metadata and the tree node/split storages, but does NOT call setData(), so
// no per-sample buffers are allocated.
CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
                                                  const CvDTreeParams& _params )
{
    is_classifier = true;
    var_all = var_count = (int)_featureEvaluator->getNumFeatures();

    featureEvaluator = _featureEvaluator;
    shared = true;
    set_params( _params );
    max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
    // var_type has two extra slots beyond the features (filled below).
    var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
    if ( featureEvaluator->getMaxCatCount() > 0 )
    {
        // All features categorical: nothing to pre-sort; entry vi holds the
        // non-negative categorical id vi.
        numPrecalcIdx = 0;
        cat_var_count = var_count;
        ord_var_count = 0;
        for( int vi = 0; vi < var_count; vi++ )
        {
            var_type->data.i[vi] = vi;
        }
    }
    else
    {
        // All features ordered: encoded as negative ids -1..-var_count.
        cat_var_count = 0;
        ord_var_count = var_count;
        for( int vi = 1; vi <= var_count; vi++ )
        {
            var_type->data.i[vi-1] = -vi;
        }
    }
    // Trailing slots for the cv_labels / sample_indices pseudo-variables.
    var_type->data.i[var_count] = cat_var_count;
    var_type->data.i[var_count+1] = cat_var_count+1;

    // Size the memory-storage blocks so any node or split always fits.
    int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) + (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
    int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
    treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
    tree_storage = cvCreateMemStorage( treeBlockSize );
    node_heap = cvCreateSet( 0, sizeof(node_heap[0]), sizeof(CvDTreeNode), tree_storage );
    split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage );
}
// Full constructor: immediately binds the feature evaluator and allocates all
// per-sample training buffers via setData(). The last two size arguments are
// the value/index precalculation cache budgets in megabytes.
CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
                                                  int _numSamples,
                                                  int _precalcValBufSize, int _precalcIdxBufSize,
                                                  const CvDTreeParams& _params )
{
    setData( _featureEvaluator, _numSamples, _precalcValBufSize, _precalcIdxBufSize, _params );
}
// Allocates and initializes all per-training-run state for _numSamples
// samples: the (optionally 16-bit) sorted-index buffer `buf`, the feature
// value cache, variable-type metadata, node/split/num-valid storages, priors
// and the working matrices. _precalcValBufSize / _precalcIdxBufSize are the
// cache budgets in megabytes.
void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluator,
                                       int _numSamples,
                                       int _precalcValBufSize, int _precalcIdxBufSize,
                                       const CvDTreeParams& _params )
{
    int* idst = 0;
    unsigned short* udst = 0;

    uint64 effective_buf_size = 0;
    int effective_buf_height = 0, effective_buf_width = 0;

    clear();
    shared = true;
    have_labels = true;
    have_priors = false;
    is_classifier = true;

    rng = &cv::theRNG();

    set_params( _params );

    CV_Assert( _featureEvaluator );
    featureEvaluator = _featureEvaluator;

    max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
    // Responses alias the evaluator's class-label matrix (no copy).
    _resp = cvMat(featureEvaluator->getCls());
    responses = &_resp;
    // TODO: check responses: elements must be 0 or 1

    if( _precalcValBufSize < 0 || _precalcIdxBufSize < 0)
        CV_Error( CV_StsOutOfRange, "_numPrecalcVal and _numPrecalcIdx must be positive or 0" );

    var_count = var_all = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize();
    sample_count = _numSamples;

    // 16-bit indices halve the buffer when sample ids fit in unsigned short.
    is_buf_16u = false;
    if (sample_count < 65536)
        is_buf_16u = true;

    // How many feature columns fit in each MB budget (1048576 bytes per MB).
    numPrecalcVal = min( cvRound((double)_precalcValBufSize*1048576. / (sizeof(float)*sample_count)), var_count );
    numPrecalcIdx = min( cvRound((double)_precalcIdxBufSize*1048576. /
                                 ((is_buf_16u ? sizeof(unsigned short) : sizeof (int))*sample_count)), var_count );

    assert( numPrecalcIdx >= 0 && numPrecalcVal >= 0 );

    valCache.create( numPrecalcVal, sample_count, CV_32FC1 );
    var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );

    if ( featureEvaluator->getMaxCatCount() > 0 )
    {
        // All features categorical: index precalc is not applicable.
        numPrecalcIdx = 0;
        cat_var_count = var_count;
        ord_var_count = 0;
        for( int vi = 0; vi < var_count; vi++ )
        {
            var_type->data.i[vi] = vi;
        }
    }
    else
    {
        // All features ordered: encoded as negative ids -1..-var_count.
        cat_var_count = 0;
        ord_var_count = var_count;
        for( int vi = 1; vi <= var_count; vi++ )
        {
            var_type->data.i[vi-1] = -vi;
        }
    }
    // Trailing slots for the cv_labels / sample_indices pseudo-variables.
    var_type->data.i[var_count] = cat_var_count;
    var_type->data.i[var_count+1] = cat_var_count+1;
    work_var_count = ( cat_var_count ? 0 : numPrecalcIdx ) + 1/*cv_lables*/;
    buf_count = 2;

    buf_size = -1; // the member buf_size is obsolete

    effective_buf_size = (uint64)(work_var_count + 1)*(uint64)sample_count * buf_count; // this is the total size of "CvMat buf" to be allocated
    effective_buf_width = sample_count;
    effective_buf_height = work_var_count+1;

    // Fold buf_count into the larger dimension, then verify the product still
    // matches (i.e. neither int dimension overflowed).
    if (effective_buf_width >= effective_buf_height)
        effective_buf_height *= buf_count;
    else
        effective_buf_width *= buf_count;

    if ((uint64)effective_buf_width * (uint64)effective_buf_height != effective_buf_size)
    {
        CV_Error(CV_StsBadArg, "The memory buffer cannot be allocated since its size exceeds integer fields limit");
    }

    if ( is_buf_16u )
        buf = cvCreateMat( effective_buf_height, effective_buf_width, CV_16UC1 );
    else
        buf = cvCreateMat( effective_buf_height, effective_buf_width, CV_32SC1 );

    cat_count = cvCreateMat( 1, cat_var_count + 1, CV_32SC1 );

    // precalculate valCache and set indices in buf
    precalculate();

    // now calculate the maximum size of split,
    // create memory storage that will keep nodes and splits of the decision tree
    // allocate root node and the buffer for the whole training data
    int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
        (MAX(0,sample_count - 33)/32)*sizeof(int),sizeof(void*));
    int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
    treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
    tree_storage = cvCreateMemStorage( treeBlockSize );
    node_heap = cvCreateSet( 0, sizeof(*node_heap), sizeof(CvDTreeNode), tree_storage );

    // Separate temp storage for per-node num_valid arrays.
    int nvSize = var_count*sizeof(int);
    nvSize = cvAlign(MAX( nvSize, (int)sizeof(CvSetElem) ), sizeof(void*));
    int tempBlockSize = nvSize;
    tempBlockSize = MAX( tempBlockSize + BlockSizeDelta, MinBlockSize );
    temp_storage = cvCreateMemStorage( tempBlockSize );
    nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nvSize, temp_storage );

    data_root = new_node( 0, sample_count, 0, 0 );

    // set sample labels: the sample_indices pseudo-variable starts out as the
    // identity permutation
    if (is_buf_16u)
        udst = (unsigned short*)(buf->data.s + (size_t)work_var_count*sample_count);
    else
        idst = buf->data.i + (size_t)work_var_count*sample_count;

    for (int si = 0; si < sample_count; si++)
    {
        if (udst)
            udst[si] = (unsigned short)si;
        else
            idst[si] = si;
    }
    for( int vi = 0; vi < var_count; vi++ )
        data_root->set_num_valid(vi, sample_count);
    for( int vi = 0; vi < cat_var_count; vi++ )
        cat_count->data.i[vi] = max_c_count;

    cat_count->data.i[cat_var_count] = 2;  // binary response

    maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
        (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
    split_heap = cvCreateSet( 0, sizeof(*split_heap), maxSplitSize, tree_storage );

    // Uniform class priors.
    priors = cvCreateMat( 1, get_num_classes(), CV_64F );
    cvSet(priors, cvScalar(1));
    priors_mult = cvCloneMat( priors );
    counts = cvCreateMat( 1, get_num_classes(), CV_32SC1 );
    direction = cvCreateMat( 1, sample_count, CV_8UC1 );
    split_buf = cvCreateMat( 1, sample_count, CV_32SC1 );//TODO: make a pointer
}
// Releases the base class' per-training buffers plus the feature value cache.
void CvCascadeBoostTrainData::free_train_data()
{
    CvDTreeTrainData::free_train_data();
    valCache.release();
}
  576. const int* CvCascadeBoostTrainData::get_class_labels( CvDTreeNode* n, int* labelsBuf)
  577. {
  578. int nodeSampleCount = n->sample_count;
  579. int rStep = CV_IS_MAT_CONT( responses->type ) ? 1 : responses->step / CV_ELEM_SIZE( responses->type );
  580. int* sampleIndicesBuf = labelsBuf; //
  581. const int* sampleIndices = get_sample_indices(n, sampleIndicesBuf);
  582. for( int si = 0; si < nodeSampleCount; si++ )
  583. {
  584. int sidx = sampleIndices[si];
  585. labelsBuf[si] = (int)responses->data.fl[sidx*rStep];
  586. }
  587. return labelsBuf;
  588. }
// Sample indices are stored as the pseudo-variable right after the work
// variables; fetch them through the generic categorical accessor.
const int* CvCascadeBoostTrainData::get_sample_indices( CvDTreeNode* n, int* indicesBuf )
{
    return CvDTreeTrainData::get_cat_var_data( n, get_work_var_count(), indicesBuf );
}
// Cross-validation labels occupy the last work-variable slot.
const int* CvCascadeBoostTrainData::get_cv_labels( CvDTreeNode* n, int* labels_buf )
{
    return CvDTreeTrainData::get_cat_var_data( n, get_work_var_count() - 1, labels_buf );
}
// Produces, for ordered variable `vi` and node `n`, the sample values sorted
// ascending (*ordValues) together with the sorting permutation of node-local
// sample indices (*sortedIndices). Uses the precalculated index and/or value
// caches when `vi` falls inside them; otherwise computes and sorts on the fly.
// The three *Buf arguments are caller-provided scratch of n->sample_count
// elements each; the out-pointers may end up aliasing them.
void CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* sortedIndicesBuf,
                                                const float** ordValues, const int** sortedIndices, int* sampleIndicesBuf )
{
    int nodeSampleCount = n->sample_count;
    const int* sampleIndices = get_sample_indices(n, sampleIndicesBuf);

    if ( vi < numPrecalcIdx )
    {
        // The sorted order was precalculated into `buf`.
        if( !is_buf_16u )
            *sortedIndices = buf->data.i + n->buf_idx*get_length_subbuf() + (size_t)vi*sample_count + n->offset;
        else
        {
            // 16-bit storage: widen into the caller's int buffer.
            const unsigned short* shortIndices = (const unsigned short*)(buf->data.s + n->buf_idx*get_length_subbuf() +
                                                                         (size_t)vi*sample_count + n->offset );
            for( int i = 0; i < nodeSampleCount; i++ )
                sortedIndicesBuf[i] = shortIndices[i];

            *sortedIndices = sortedIndicesBuf;
        }

        if( vi < numPrecalcVal )
        {
            // Values also cached: gather them in sorted order.
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                int idx = (*sortedIndices)[i];
                idx = sampleIndices[idx];
                ordValuesBuf[i] = valCache.at<float>( vi, idx);
            }
        }
        else
        {
            // Values not cached: evaluate the feature per sample.
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                int idx = (*sortedIndices)[i];
                idx = sampleIndices[idx];
                ordValuesBuf[i] = (*featureEvaluator)( vi, idx);
            }
        }
    }
    else // vi >= numPrecalcIdx: no precalculated order, sort here
    {
        cv::AutoBuffer<float> abuf(nodeSampleCount);
        float* sampleValues = &abuf[0];

        if ( vi < numPrecalcVal )
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                sortedIndicesBuf[i] = i;
                sampleValues[i] = valCache.at<float>( vi, sampleIndices[i] );
            }
        }
        else
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                sortedIndicesBuf[i] = i;
                sampleValues[i] = (*featureEvaluator)( vi, sampleIndices[i]);
            }
        }
        // Argsort the node-local indices by value, then gather the values.
        std::sort(sortedIndicesBuf, sortedIndicesBuf + nodeSampleCount, LessThanIdx<float, int>(&sampleValues[0]) );
        for( int i = 0; i < nodeSampleCount; i++ )
            ordValuesBuf[i] = (&sampleValues[0])[sortedIndicesBuf[i]];
        *sortedIndices = sortedIndicesBuf;
    }

    *ordValues = ordValuesBuf;
}
  660. const int* CvCascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf )
  661. {
  662. int nodeSampleCount = n->sample_count;
  663. int* sampleIndicesBuf = catValuesBuf; //
  664. const int* sampleIndices = get_sample_indices(n, sampleIndicesBuf);
  665. if ( vi < numPrecalcVal )
  666. {
  667. for( int i = 0; i < nodeSampleCount; i++ )
  668. catValuesBuf[i] = (int) valCache.at<float>( vi, sampleIndices[i]);
  669. }
  670. else
  671. {
  672. if( vi >= numPrecalcVal && vi < var_count )
  673. {
  674. for( int i = 0; i < nodeSampleCount; i++ )
  675. catValuesBuf[i] = (int)(*featureEvaluator)( vi, sampleIndices[i] );
  676. }
  677. else
  678. {
  679. get_cv_labels( n, catValuesBuf );
  680. }
  681. }
  682. return catValuesBuf;
  683. }
  684. float CvCascadeBoostTrainData::getVarValue( int vi, int si )
  685. {
  686. if ( vi < numPrecalcVal && !valCache.empty() )
  687. return valCache.at<float>( vi, si );
  688. return (*featureEvaluator)( vi, si );
  689. }
  690. struct FeatureIdxOnlyPrecalc : ParallelLoopBody
  691. {
  692. FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, int _sample_count, bool _is_buf_16u )
  693. {
  694. featureEvaluator = _featureEvaluator;
  695. sample_count = _sample_count;
  696. udst = (unsigned short*)_buf->data.s;
  697. idst = _buf->data.i;
  698. is_buf_16u = _is_buf_16u;
  699. }
  700. void operator()( const Range& range ) const
  701. {
  702. cv::AutoBuffer<float> valCache(sample_count);
  703. float* valCachePtr = valCache.data();
  704. for ( int fi = range.start; fi < range.end; fi++)
  705. {
  706. for( int si = 0; si < sample_count; si++ )
  707. {
  708. valCachePtr[si] = (*featureEvaluator)( fi, si );
  709. if ( is_buf_16u )
  710. *(udst + (size_t)fi*sample_count + si) = (unsigned short)si;
  711. else
  712. *(idst + (size_t)fi*sample_count + si) = si;
  713. }
  714. if ( is_buf_16u )
  715. std::sort(udst + (size_t)fi*sample_count, udst + (size_t)(fi + 1)*sample_count, LessThanIdx<float, unsigned short>(valCachePtr) );
  716. else
  717. std::sort(idst + (size_t)fi*sample_count, idst + (size_t)(fi + 1)*sample_count, LessThanIdx<float, int>(valCachePtr) );
  718. }
  719. }
  720. const CvFeatureEvaluator* featureEvaluator;
  721. int sample_count;
  722. int* idst;
  723. unsigned short* udst;
  724. bool is_buf_16u;
  725. };
  726. struct FeatureValAndIdxPrecalc : ParallelLoopBody
  727. {
  728. FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u )
  729. {
  730. featureEvaluator = _featureEvaluator;
  731. valCache = _valCache;
  732. sample_count = _sample_count;
  733. udst = (unsigned short*)_buf->data.s;
  734. idst = _buf->data.i;
  735. is_buf_16u = _is_buf_16u;
  736. }
  737. void operator()( const Range& range ) const
  738. {
  739. for ( int fi = range.start; fi < range.end; fi++)
  740. {
  741. for( int si = 0; si < sample_count; si++ )
  742. {
  743. valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si );
  744. if ( is_buf_16u )
  745. *(udst + (size_t)fi*sample_count + si) = (unsigned short)si;
  746. else
  747. *(idst + (size_t)fi*sample_count + si) = si;
  748. }
  749. if ( is_buf_16u )
  750. std::sort(udst + (size_t)fi*sample_count, udst + (size_t)(fi + 1)*sample_count, LessThanIdx<float, unsigned short>(valCache->ptr<float>(fi)) );
  751. else
  752. std::sort(idst + (size_t)fi*sample_count, idst + (size_t)(fi + 1)*sample_count, LessThanIdx<float, int>(valCache->ptr<float>(fi)) );
  753. }
  754. }
  755. const CvFeatureEvaluator* featureEvaluator;
  756. Mat* valCache;
  757. int sample_count;
  758. int* idst;
  759. unsigned short* udst;
  760. bool is_buf_16u;
  761. };
  762. struct FeatureValOnlyPrecalc : ParallelLoopBody
  763. {
  764. FeatureValOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, Mat* _valCache, int _sample_count )
  765. {
  766. featureEvaluator = _featureEvaluator;
  767. valCache = _valCache;
  768. sample_count = _sample_count;
  769. }
  770. void operator()( const Range& range ) const
  771. {
  772. for ( int fi = range.start; fi < range.end; fi++)
  773. for( int si = 0; si < sample_count; si++ )
  774. valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si );
  775. }
  776. const CvFeatureEvaluator* featureEvaluator;
  777. Mat* valCache;
  778. int sample_count;
  779. };
// Precalculate feature data before training, split by budget:
//   [0, minNum)                  -> cached values AND sorted indices
//   [minNum, numPrecalcVal)      -> cached values only
//   [numPrecalcVal, numPrecalcIdx) -> sorted indices only
// (ranges are empty when the corresponding budget doesn't cover them).
void CvCascadeBoostTrainData::precalculate()
{
    int minNum = MIN( numPrecalcVal, numPrecalcIdx);
    double proctime = -TIME( 0 );  // negative start time; adding TIME(0) later yields the elapsed time
    parallel_for_( Range(numPrecalcVal, numPrecalcIdx),
                   FeatureIdxOnlyPrecalc(featureEvaluator, buf, sample_count, is_buf_16u!=0) );
    parallel_for_( Range(0, minNum),
                   FeatureValAndIdxPrecalc(featureEvaluator, buf, &valCache, sample_count, is_buf_16u!=0) );
    parallel_for_( Range(minNum, numPrecalcVal),
                   FeatureValOnlyPrecalc(featureEvaluator, &valCache, sample_count) );
    cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl;
}
  792. //-------------------------------- CascadeBoostTree ----------------------------------------
  793. CvDTreeNode* CvCascadeBoostTree::predict( int sampleIdx ) const
  794. {
  795. CvDTreeNode* node = root;
  796. if( !node )
  797. CV_Error( CV_StsError, "The tree has not been trained yet" );
  798. if ( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount() == 0 ) // ordered
  799. {
  800. while( node->left )
  801. {
  802. CvDTreeSplit* split = node->split;
  803. float val = ((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
  804. node = val <= split->ord.c ? node->left : node->right;
  805. }
  806. }
  807. else // categorical
  808. {
  809. while( node->left )
  810. {
  811. CvDTreeSplit* split = node->split;
  812. int c = (int)((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
  813. node = CV_DTREE_CAT_DIR(c, split->subset) < 0 ? node->left : node->right;
  814. }
  815. }
  816. return node;
  817. }
// Serialize the tree in breadth-first order. Internal nodes are written as
// (leftChild, rightChild, featureIdx, threshold-or-subset) tuples; a child
// reference <= 0 encodes a leaf (its value goes to the leaf-values list,
// addressed by the negated index), a positive reference is the 1-based BFS
// position of another internal node. featureMap, when non-empty, remaps
// feature indices for writing (used when only a subset of features is kept).
void CvCascadeBoostTree::write( FileStorage &fs, const Mat& featureMap )
{
    int maxCatCount = ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount();
    int subsetN = (maxCatCount + 31)/32;  // number of 32-bit words in a category subset mask
    queue<CvDTreeNode*> internalNodesQueue;
    // upper bound on the number of leaves: 2^max_depth
    int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth);
    std::vector<float> leafVals(size);
    int leafValIdx = 0;       // decremented per leaf; -leafValIdx is the leaf count so far
    int internalNodeIdx = 1;  // next BFS position handed out to an internal child
    CvDTreeNode* tempNode;
    CV_DbgAssert( root );
    internalNodesQueue.push( root );
    fs << "{";
    fs << CC_INTERNAL_NODES << "[:";
    while (!internalNodesQueue.empty())
    {
        tempNode = internalNodesQueue.front();
        CV_Assert( tempNode->left );
        if ( !tempNode->left->left && !tempNode->left->right) // left node is leaf
        {
            // store the leaf value and write its (non-positive) reference
            leafVals[-leafValIdx] = (float)tempNode->left->value;
            fs << leafValIdx-- ;
        }
        else
        {
            // left child is internal: enqueue it and write its BFS position
            internalNodesQueue.push( tempNode->left );
            fs << internalNodeIdx++;
        }
        CV_Assert( tempNode->right );
        if ( !tempNode->right->left && !tempNode->right->right) // right node is leaf
        {
            leafVals[-leafValIdx] = (float)tempNode->right->value;
            fs << leafValIdx--;
        }
        else
        {
            internalNodesQueue.push( tempNode->right );
            fs << internalNodeIdx++;
        }
        // write the split's feature index, remapped through featureMap if given
        int fidx = tempNode->split->var_idx;
        fidx = featureMap.empty() ? fidx : featureMap.at<int>(0, fidx);
        fs << fidx;
        if ( !maxCatCount )
            fs << tempNode->split->ord.c;          // ordered split: threshold
        else
            for( int i = 0; i < subsetN; i++ )     // categorical split: subset mask words
                fs << tempNode->split->subset[i];
        internalNodesQueue.pop();
    }
    fs << "]"; // CC_INTERNAL_NODES
    fs << CC_LEAF_VALUES << "[:";
    for (int ni = 0; ni < -leafValIdx; ni++)
        fs << leafVals[ni];
    fs << "]"; // CC_LEAF_VALUES
    fs << "}";
}
// Deserialize a tree written by CvCascadeBoostTree::write. The node tuples
// are consumed from the END of the stored lists backwards, so children are
// reconstructed before their parent; the queue holds already-built internal
// nodes awaiting attachment, and the last node left in it is the root.
// A stored child reference <= 0 denotes a leaf whose value is popped from
// the leaf-values list (also back to front).
void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
                                CvDTreeTrainData* _data )
{
    int maxCatCount = ((CvCascadeBoostTrainData*)_data)->featureEvaluator->getMaxCatCount();
    int subsetN = (maxCatCount + 31)/32;  // words per category subset mask
    // entries per internal node: left ref, right ref, feature idx, then
    // either one threshold (ordered) or subsetN mask words (categorical)
    int step = 3 + ( maxCatCount>0 ? subsetN : 1 );
    queue<CvDTreeNode*> internalNodesQueue;
    int internalNodesIdx, leafValsuesIdx;
    CvDTreeNode* prntNode, *cldNode;
    clear();
    data = _data;
    ensemble = _ensemble;
    pruned_tree_idx = 0;
    // read tree nodes
    FileNode rnode = node[CC_INTERNAL_NODES];
    internalNodesIdx = (int) rnode.size() - 1;   // cursor walking backwards
    FileNode lnode = node[CC_LEAF_VALUES];
    leafValsuesIdx = (int) lnode.size() - 1;     // leaf cursor, also backwards
    for( size_t i = 0; i < rnode.size()/step; i++ )
    {
        prntNode = data->new_node( 0, 0, 0, 0 );
        if ( maxCatCount > 0 )
        {
            // categorical split: read the subset mask words (reversed order)
            prntNode->split = data->new_split_cat( 0, 0 );
            for( int j = subsetN-1; j>=0; j--)
            {
                rnode[internalNodesIdx] >> prntNode->split->subset[j]; --internalNodesIdx;
            }
        }
        else
        {
            // ordered split: read the threshold
            float split_value;
            rnode[internalNodesIdx] >> split_value; --internalNodesIdx;
            prntNode->split = data->new_split_ord( 0, split_value, 0, 0, 0);
        }
        rnode[internalNodesIdx] >> prntNode->split->var_idx; --internalNodesIdx;
        int ridx, lidx;
        rnode[internalNodesIdx] >> ridx; --internalNodesIdx;
        rnode[internalNodesIdx] >> lidx; --internalNodesIdx;
        if ( ridx <= 0)
        {
            // right child is a leaf: build it and pop its value
            prntNode->right = cldNode = data->new_node( 0, 0, 0, 0 );
            lnode[leafValsuesIdx] >> cldNode->value; --leafValsuesIdx;
            cldNode->parent = prntNode;
        }
        else
        {
            // right child is an already-reconstructed internal node
            prntNode->right = internalNodesQueue.front();
            prntNode->right->parent = prntNode;
            internalNodesQueue.pop();
        }
        if ( lidx <= 0)
        {
            prntNode->left = cldNode = data->new_node( 0, 0, 0, 0 );
            lnode[leafValsuesIdx] >> cldNode->value; --leafValsuesIdx;
            cldNode->parent = prntNode;
        }
        else
        {
            prntNode->left = internalNodesQueue.front();
            prntNode->left->parent = prntNode;
            internalNodesQueue.pop();
        }
        internalNodesQueue.push( prntNode );
    }
    root = internalNodesQueue.front();
    internalNodesQueue.pop();
}
// Partition a node's per-variable working buffers between its two new
// children according to the direction array computed by the split
// (dir[i] == 0 -> left, 1 -> right). For the precalculated-index variables
// the sorted order is preserved in both halves via the newIdx relocation
// table; the cv-labels row and the sample-indices row are relocated too.
// Finally the parent's buffers are released.
void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
{
    int n = node->sample_count, nl, nr, scount = data->sample_count;
    char* dir = (char*)data->direction->data.ptr;
    CvDTreeNode *left = 0, *right = 0;
    int* newIdx = data->split_buf->data.i;
    int newBufIdx = data->get_child_buf_idx( node );
    int workVarCount = data->get_work_var_count();
    CvMat* buf = data->buf;
    size_t length_buf_row = data->get_length_subbuf();
    // scratch: n ints for the relocation copy + room for the per-variable
    // (value, sorted-index, sample-index) triples fetched below
    cv::AutoBuffer<uchar> inn_buf(n*(3*sizeof(int)+sizeof(float)));
    int* tempBuf = (int*)inn_buf.data();
    bool splitInputData;
    complete_node_dir(node);
    for( int i = nl = nr = 0; i < n; i++ )
    {
        int d = dir[i];
        // initialize new indices for splitting ordered variables
        newIdx[i] = (nl & (d-1)) | (nr & -d); // d ? ri : li
        nr += d;
        nl += d^1;
    }
    // children share the parent's buffer row: left at the parent's offset,
    // right immediately after the nl left samples
    node->left = left = data->new_node( node, nl, newBufIdx, node->offset );
    node->right = right = data->new_node( node, nr, newBufIdx, node->offset + nl );
    // only bother splitting the input rows if either child can be split further
    splitInputData = node->depth + 1 < data->params.max_depth &&
        (node->left->sample_count > data->params.min_sample_count ||
        node->right->sample_count > data->params.min_sample_count);
    // split ordered variables, keep both halves sorted.
    for( int vi = 0; vi < ((CvCascadeBoostTrainData*)data)->numPrecalcIdx; vi++ )
    {
        int ci = data->get_var_type(vi);
        if( ci >= 0 || !splitInputData )  // skip categorical vars and unneeded work
            continue;
        int n1 = node->get_num_valid(vi);
        // carve the per-variable scratch areas out of inn_buf (after tempBuf)
        float *src_val_buf = (float*)(tempBuf + n);
        int *src_sorted_idx_buf = (int*)(src_val_buf + n);
        int *src_sample_idx_buf = src_sorted_idx_buf + n;
        const int* src_sorted_idx = 0;
        const float* src_val = 0;
        data->get_ord_var_data(node, vi, src_val_buf, src_sorted_idx_buf, &src_val, &src_sorted_idx, src_sample_idx_buf);
        // copy out the sorted indices before the buffer row is overwritten
        for(int i = 0; i < n; i++)
            tempBuf[i] = src_sorted_idx[i];
        if (data->is_buf_16u)
        {
            ushort *ldst, *rdst;
            ldst = (ushort*)(buf->data.s + left->buf_idx*length_buf_row +
                vi*scount + left->offset);
            rdst = (ushort*)(ldst + nl);  // right half starts right after the nl left entries
            // split sorted
            for( int i = 0; i < n1; i++ )
            {
                int idx = tempBuf[i];
                int d = dir[idx];
                idx = newIdx[idx];  // remap to the child-local index
                if (d)
                {
                    *rdst = (ushort)idx;
                    rdst++;
                }
                else
                {
                    *ldst = (ushort)idx;
                    ldst++;
                }
            }
            CV_Assert( n1 == n );  // all samples must have valid values here
        }
        else
        {
            int *ldst, *rdst;
            ldst = buf->data.i + left->buf_idx*length_buf_row +
                vi*scount + left->offset;
            rdst = buf->data.i + right->buf_idx*length_buf_row +
                vi*scount + right->offset;
            // split sorted
            for( int i = 0; i < n1; i++ )
            {
                int idx = tempBuf[i];
                int d = dir[idx];
                idx = newIdx[idx];
                if (d)
                {
                    *rdst = idx;
                    rdst++;
                }
                else
                {
                    *ldst = idx;
                    ldst++;
                }
            }
            CV_Assert( n1 == n );
        }
    }
    // split cv_labels using newIdx relocation table
    int *src_lbls_buf = tempBuf + n;
    const int* src_lbls = data->get_cv_labels(node, src_lbls_buf);
    for(int i = 0; i < n; i++)
        tempBuf[i] = src_lbls[i];
    if (data->is_buf_16u)
    {
        // cv-labels occupy row (workVarCount-1) of each node's buffer strip
        unsigned short *ldst = (unsigned short *)(buf->data.s + left->buf_idx*length_buf_row +
            (size_t)(workVarCount-1)*scount + left->offset);
        unsigned short *rdst = (unsigned short *)(buf->data.s + right->buf_idx*length_buf_row +
            (size_t)(workVarCount-1)*scount + right->offset);
        for( int i = 0; i < n; i++ )
        {
            int idx = tempBuf[i];
            if (dir[i])
            {
                *rdst = (unsigned short)idx;
                rdst++;
            }
            else
            {
                *ldst = (unsigned short)idx;
                ldst++;
            }
        }
    }
    else
    {
        int *ldst = buf->data.i + left->buf_idx*length_buf_row +
            (size_t)(workVarCount-1)*scount + left->offset;
        int *rdst = buf->data.i + right->buf_idx*length_buf_row +
            (size_t)(workVarCount-1)*scount + right->offset;
        for( int i = 0; i < n; i++ )
        {
            int idx = tempBuf[i];
            if (dir[i])
            {
                *rdst = idx;
                rdst++;
            }
            else
            {
                *ldst = idx;
                ldst++;
            }
        }
    }
    // split sample indices
    int *sampleIdx_src_buf = tempBuf + n;
    const int* sampleIdx_src = data->get_sample_indices(node, sampleIdx_src_buf);
    for(int i = 0; i < n; i++)
        tempBuf[i] = sampleIdx_src[i];
    if (data->is_buf_16u)
    {
        // sample indices occupy row workVarCount of each node's buffer strip
        unsigned short* ldst = (unsigned short*)(buf->data.s + left->buf_idx*length_buf_row +
            (size_t)workVarCount*scount + left->offset);
        unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*length_buf_row +
            (size_t)workVarCount*scount + right->offset);
        for (int i = 0; i < n; i++)
        {
            unsigned short idx = (unsigned short)tempBuf[i];
            if (dir[i])
            {
                *rdst = idx;
                rdst++;
            }
            else
            {
                *ldst = idx;
                ldst++;
            }
        }
    }
    else
    {
        int* ldst = buf->data.i + left->buf_idx*length_buf_row +
            (size_t)workVarCount*scount + left->offset;
        int* rdst = buf->data.i + right->buf_idx*length_buf_row +
            (size_t)workVarCount*scount + right->offset;
        for (int i = 0; i < n; i++)
        {
            int idx = tempBuf[i];
            if (dir[i])
            {
                *rdst = idx;
                rdst++;
            }
            else
            {
                *ldst = idx;
                ldst++;
            }
        }
    }
    // every variable has all child samples valid (no missing values here)
    for( int vi = 0; vi < data->var_count; vi++ )
    {
        left->set_num_valid(vi, (int)(nl));
        right->set_num_valid(vi, (int)(nr));
    }
    // deallocate the parent node data that is not needed anymore
    data->free_node_data(node);
}
  1138. static void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap)
  1139. {
  1140. if ( node && node->split )
  1141. {
  1142. featureMap.ptr<int>(0)[node->split->var_idx] = 1;
  1143. auxMarkFeaturesInMap( node->left, featureMap );
  1144. auxMarkFeaturesInMap( node->right, featureMap );
  1145. }
  1146. }
// Mark (set to 1) every feature index used by this tree's splits in featureMap.
void CvCascadeBoostTree::markFeaturesInMap( Mat& featureMap )
{
    auxMarkFeaturesInMap( root, featureMap );
}
  1151. //----------------------------------- CascadeBoost --------------------------------------
// Train one boosted stage: add weak trees until the stage reaches the
// desired false-alarm rate (isErrDesired) or the weak-count limit.
// Returns true if at least one weak classifier was trained.
bool CvCascadeBoost::train( const CvFeatureEvaluator* _featureEvaluator,
                            int _numSamples,
                            int _precalcValBufSize, int _precalcIdxBufSize,
                            const CvCascadeBoostParams& _params )
{
    bool isTrained = false;
    CV_Assert( !data );
    clear();
    data = new CvCascadeBoostTrainData( _featureEvaluator, _numSamples,
                                        _precalcValBufSize, _precalcIdxBufSize, _params );
    CvMemStorage *storage = cvCreateMemStorage();
    // the sequence takes over the storage; it is released with the sequence
    weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
    storage = 0;
    set_params( _params );
    // LogitBoost / Gentle AdaBoost train regression trees, so responses are
    // copied and later overwritten with real-valued targets
    if ( (_params.boost_type == LOGIT) || (_params.boost_type == GENTLE) )
        data->do_responses_copy();
    update_weights( 0 );  // tree == 0: initialize weights, labels, masks
    cout << "+----+---------+---------+" << endl;
    cout << "| N | HR | FA |" << endl;
    cout << "+----+---------+---------+" << endl;
    do
    {
        CvCascadeBoostTree* tree = new CvCascadeBoostTree;
        if( !tree->train( data, subsample_mask, this ) )
        {
            delete tree;
            break;
        }
        cvSeqPush( weak, &tree );
        update_weights( tree );
        trim_weights();
        // all samples trimmed away: nothing left to train on
        if( cvCountNonZero(subsample_mask) == 0 )
            break;
    }
    while( !isErrDesired() && (weak->total < params.weak_count) );
    if(weak->total > 0)
    {
        data->is_classifier = true;
        data->free_train_data();
        isTrained = true;
    }
    else
        clear();
    return isTrained;
}
  1197. float CvCascadeBoost::predict( int sampleIdx, bool returnSum ) const
  1198. {
  1199. CV_Assert( weak );
  1200. double sum = 0;
  1201. CvSeqReader reader;
  1202. cvStartReadSeq( weak, &reader );
  1203. cvSetSeqReaderPos( &reader, 0 );
  1204. for( int i = 0; i < weak->total; i++ )
  1205. {
  1206. CvBoostTree* wtree;
  1207. CV_READ_SEQ_ELEM( wtree, reader );
  1208. sum += ((CvCascadeBoostTree*)wtree)->predict(sampleIdx)->value;
  1209. }
  1210. if( !returnSum )
  1211. sum = sum < threshold - CV_THRESHOLD_EPS ? 0.0 : 1.0;
  1212. return (float)sum;
  1213. }
  1214. bool CvCascadeBoost::set_params( const CvBoostParams& _params )
  1215. {
  1216. minHitRate = ((CvCascadeBoostParams&)_params).minHitRate;
  1217. maxFalseAlarm = ((CvCascadeBoostParams&)_params).maxFalseAlarm;
  1218. return ( ( minHitRate > 0 ) && ( minHitRate < 1) &&
  1219. ( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1) &&
  1220. CvBoost::set_params( _params ));
  1221. }
// Update the per-sample boosting weights after training weak classifier
// `tree`. Called with tree == 0 once before the first tree to initialize
// weights, responses, masks and labels. Implements the weight-update rules
// of Discrete/Real AdaBoost, LogitBoost and Gentle AdaBoost, then
// renormalizes the weights to sum to 1.
void CvCascadeBoost::update_weights( CvBoostTree* tree )
{
    int n = data->sample_count;
    double sumW = 0.;
    int step = 0;
    float* fdata = 0;
    int *sampleIdxBuf;
    const int* sampleIdx = 0;
    // scratch: sample indices (LOGIT/GENTLE) + class labels (init call only)
    int inn_buf_size = ((params.boost_type == LOGIT) || (params.boost_type == GENTLE) ? n*sizeof(int) : 0) +
                       ( !tree ? n*sizeof(int) : 0 );
    cv::AutoBuffer<uchar> inn_buf(inn_buf_size);
    uchar* cur_inn_buf_pos = inn_buf.data();
    if ( (params.boost_type == LOGIT) || (params.boost_type == GENTLE) )
    {
        // these variants rewrite the (copied) responses as regression targets
        step = CV_IS_MAT_CONT(data->responses_copy->type) ?
            1 : data->responses_copy->step / CV_ELEM_SIZE(data->responses_copy->type);
        fdata = data->responses_copy->data.fl;
        sampleIdxBuf = (int*)cur_inn_buf_pos; cur_inn_buf_pos = (uchar*)(sampleIdxBuf + n);
        sampleIdx = data->get_sample_indices( data->data_root, sampleIdxBuf );
    }
    CvMat* buf = data->buf;
    size_t length_buf_row = data->get_length_subbuf();
    if( !tree ) // before training the first tree, initialize weights and other parameters
    {
        int* classLabelsBuf = (int*)cur_inn_buf_pos; cur_inn_buf_pos = (uchar*)(classLabelsBuf + n);
        const int* classLabels = data->get_class_labels(data->data_root, classLabelsBuf);
        // in case of logitboost and gentle adaboost each weak tree is a regression tree,
        // so we need to convert class labels to floating-point values
        double w0 = 1./n;
        double p[2] = { 1, 1 };
        cvReleaseMat( &orig_response );
        cvReleaseMat( &sum_response );
        cvReleaseMat( &weak_eval );
        cvReleaseMat( &subsample_mask );
        cvReleaseMat( &weights );
        orig_response = cvCreateMat( 1, n, CV_32S );
        weak_eval = cvCreateMat( 1, n, CV_64F );
        subsample_mask = cvCreateMat( 1, n, CV_8U );
        weights = cvCreateMat( 1, n, CV_64F );
        subtree_weights = cvCreateMat( 1, n + 2, CV_64F );
        if (data->is_buf_16u)
        {
            unsigned short* labels = (unsigned short*)(buf->data.s + data->data_root->buf_idx*length_buf_row +
                data->data_root->offset + (size_t)(data->work_var_count-1)*data->sample_count);
            for( int i = 0; i < n; i++ )
            {
                // save original categorical responses {0,1}, convert them to {-1,1}
                orig_response->data.i[i] = classLabels[i]*2 - 1;
                // make all the samples active at start.
                // later, in trim_weights() deactivate/reactive again some, if need
                subsample_mask->data.ptr[i] = (uchar)1;
                // make all the initial weights the same.
                weights->data.db[i] = w0*p[classLabels[i]];
                // set the labels to find (from within weak tree learning proc)
                // the particular sample weight, and where to store the response.
                labels[i] = (unsigned short)i;
            }
        }
        else
        {
            // same initialization for the 32-bit label buffer layout
            int* labels = buf->data.i + data->data_root->buf_idx*length_buf_row +
                data->data_root->offset + (size_t)(data->work_var_count-1)*data->sample_count;
            for( int i = 0; i < n; i++ )
            {
                // save original categorical responses {0,1}, convert them to {-1,1}
                orig_response->data.i[i] = classLabels[i]*2 - 1;
                subsample_mask->data.ptr[i] = (uchar)1;
                weights->data.db[i] = w0*p[classLabels[i]];
                labels[i] = i;
            }
        }
        if( params.boost_type == LOGIT )
        {
            sum_response = cvCreateMat( 1, n, CV_64F );
            for( int i = 0; i < n; i++ )
            {
                sum_response->data.db[i] = 0;
                fdata[sampleIdx[i]*step] = orig_response->data.i[i] > 0 ? 2.f : -2.f;
            }
            // in case of logitboost each weak tree is a regression tree.
            // the target function values are recalculated for each of the trees
            data->is_classifier = false;
        }
        else if( params.boost_type == GENTLE )
        {
            for( int i = 0; i < n; i++ )
                fdata[sampleIdx[i]*step] = (float)orig_response->data.i[i];
            data->is_classifier = false;
        }
    }
    else
    {
        // at this moment, for all the samples that participated in the training of the most
        // recent weak classifier we know the responses. For other samples we need to compute them
        if( have_subsample )
        {
            // invert the subsample mask
            cvXorS( subsample_mask, cvScalar(1.), subsample_mask );
            // run tree through all the non-processed samples
            for( int i = 0; i < n; i++ )
                if( subsample_mask->data.ptr[i] )
                {
                    weak_eval->data.db[i] = ((CvCascadeBoostTree*)tree)->predict( i )->value;
                }
        }
        // now update weights and other parameters for each type of boosting
        if( params.boost_type == DISCRETE )
        {
            // Discrete AdaBoost:
            // weak_eval[i] (=f(x_i)) is in {-1,1}
            // err = sum(w_i*(f(x_i) != y_i))/sum(w_i)
            // C = log((1-err)/err)
            // w_i *= exp(C*(f(x_i) != y_i))
            double C, err = 0.;
            double scale[] = { 1., 0. };
            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i];
                sumW += w;
                err += w*(weak_eval->data.db[i] != orig_response->data.i[i]);
            }
            if( sumW != 0 )
                err /= sumW;
            C = err = -logRatio( err );
            scale[1] = exp(err);
            sumW = 0;
            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i]*
                    scale[weak_eval->data.db[i] != orig_response->data.i[i]];
                sumW += w;
                weights->data.db[i] = w;
            }
            tree->scale( C );
        }
        else if( params.boost_type == REAL )
        {
            // Real AdaBoost:
            // weak_eval[i] = f(x_i) = 0.5*log(p(x_i)/(1-p(x_i))), p(x_i)=P(y=1|x_i)
            // w_i *= exp(-y_i*f(x_i))
            for( int i = 0; i < n; i++ )
                weak_eval->data.db[i] *= -orig_response->data.i[i];
            cvExp( weak_eval, weak_eval );
            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i]*weak_eval->data.db[i];
                sumW += w;
                weights->data.db[i] = w;
            }
        }
        else if( params.boost_type == LOGIT )
        {
            // LogitBoost:
            // weak_eval[i] = f(x_i) in [-z_max,z_max]
            // sum_response = F(x_i).
            // F(x_i) += 0.5*f(x_i)
            // p(x_i) = exp(F(x_i))/(exp(F(x_i)) + exp(-F(x_i))=1/(1+exp(-2*F(x_i)))
            // reuse weak_eval: weak_eval[i] <- p(x_i)
            // w_i = p(x_i)*1(1 - p(x_i))
            // z_i = ((y_i+1)/2 - p(x_i))/(p(x_i)*(1 - p(x_i)))
            // store z_i to the data->data_root as the new target responses
            const double lbWeightThresh = FLT_EPSILON;
            const double lbZMax = 10.;
            for( int i = 0; i < n; i++ )
            {
                double s = sum_response->data.db[i] + 0.5*weak_eval->data.db[i];
                sum_response->data.db[i] = s;
                weak_eval->data.db[i] = -2*s;
            }
            cvExp( weak_eval, weak_eval );
            for( int i = 0; i < n; i++ )
            {
                double p = 1./(1. + weak_eval->data.db[i]);
                double w = p*(1 - p), z;
                // clamp the weight away from zero to keep z_i bounded
                w = MAX( w, lbWeightThresh );
                weights->data.db[i] = w;
                sumW += w;
                if( orig_response->data.i[i] > 0 )
                {
                    z = 1./p;
                    fdata[sampleIdx[i]*step] = (float)min(z, lbZMax);
                }
                else
                {
                    z = 1./(1-p);
                    fdata[sampleIdx[i]*step] = (float)-min(z, lbZMax);
                }
            }
        }
        else
        {
            // Gentle AdaBoost:
            // weak_eval[i] = f(x_i) in [-1,1]
            // w_i *= exp(-y_i*f(x_i))
            assert( params.boost_type == GENTLE );
            for( int i = 0; i < n; i++ )
                weak_eval->data.db[i] *= -orig_response->data.i[i];
            cvExp( weak_eval, weak_eval );
            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i] * weak_eval->data.db[i];
                weights->data.db[i] = w;
                sumW += w;
            }
        }
    }
    // renormalize weights
    if( sumW > FLT_EPSILON )
    {
        sumW = 1./sumW;
        for( int i = 0; i < n; ++i )
            weights->data.db[i] *= sumW;
    }
}
  1436. bool CvCascadeBoost::isErrDesired()
  1437. {
  1438. int sCount = data->sample_count,
  1439. numPos = 0, numNeg = 0, numFalse = 0, numPosTrue = 0;
  1440. vector<float> eval(sCount);
  1441. for( int i = 0; i < sCount; i++ )
  1442. if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 1.0F )
  1443. eval[numPos++] = predict( i, true );
  1444. std::sort(&eval[0], &eval[0] + numPos);
  1445. int thresholdIdx = (int)((1.0F - minHitRate) * numPos);
  1446. threshold = eval[ thresholdIdx ];
  1447. numPosTrue = numPos - thresholdIdx;
  1448. for( int i = thresholdIdx - 1; i >= 0; i--)
  1449. if ( abs( eval[i] - threshold) < FLT_EPSILON )
  1450. numPosTrue++;
  1451. float hitRate = ((float) numPosTrue) / ((float) numPos);
  1452. for( int i = 0; i < sCount; i++ )
  1453. {
  1454. if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 0.0F )
  1455. {
  1456. numNeg++;
  1457. if( predict( i ) )
  1458. numFalse++;
  1459. }
  1460. }
  1461. float falseAlarm = ((float) numFalse) / ((float) numNeg);
  1462. cout << "|"; cout.width(4); cout << right << weak->total;
  1463. cout << "|"; cout.width(9); cout << right << hitRate;
  1464. cout << "|"; cout.width(9); cout << right << falseAlarm;
  1465. cout << "|" << endl;
  1466. cout << "+----+---------+---------+" << endl;
  1467. return falseAlarm <= maxFalseAlarm;
  1468. }
  1469. void CvCascadeBoost::write( FileStorage &fs, const Mat& featureMap ) const
  1470. {
  1471. // char cmnt[30];
  1472. CvCascadeBoostTree* weakTree;
  1473. fs << CC_WEAK_COUNT << weak->total;
  1474. fs << CC_STAGE_THRESHOLD << threshold;
  1475. fs << CC_WEAK_CLASSIFIERS << "[";
  1476. for( int wi = 0; wi < weak->total; wi++)
  1477. {
  1478. /*snprintf( cmnt, sizeof(cmnt), "tree %i", wi );
  1479. cvWriteComment( fs, cmnt, 0 );*/
  1480. weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
  1481. weakTree->write( fs, featureMap );
  1482. }
  1483. fs << "]";
  1484. }
  1485. bool CvCascadeBoost::read( const FileNode &node,
  1486. const CvFeatureEvaluator* _featureEvaluator,
  1487. const CvCascadeBoostParams& _params )
  1488. {
  1489. CvMemStorage* storage;
  1490. clear();
  1491. data = new CvCascadeBoostTrainData( _featureEvaluator, _params );
  1492. set_params( _params );
  1493. node[CC_STAGE_THRESHOLD] >> threshold;
  1494. FileNode rnode = node[CC_WEAK_CLASSIFIERS];
  1495. storage = cvCreateMemStorage();
  1496. weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
  1497. for( FileNodeIterator it = rnode.begin(); it != rnode.end(); it++ )
  1498. {
  1499. CvCascadeBoostTree* tree = new CvCascadeBoostTree();
  1500. tree->read( *it, this, data );
  1501. cvSeqPush( weak, &tree );
  1502. }
  1503. return true;
  1504. }
  1505. void CvCascadeBoost::markUsedFeaturesInMap( Mat& featureMap )
  1506. {
  1507. for( int wi = 0; wi < weak->total; wi++ )
  1508. {
  1509. CvCascadeBoostTree* weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
  1510. weakTree->markFeaturesInMap( featureMap );
  1511. }
  1512. }