kmeans_index.h

  1. /***********************************************************************
  2. * Software License Agreement (BSD License)
  3. *
  4. * Copyright 2008-2009 Marius Muja (mariusm@cs.ubc.ca). All rights reserved.
  5. * Copyright 2008-2009 David G. Lowe (lowe@cs.ubc.ca). All rights reserved.
  6. *
  7. * THE BSD LICENSE
  8. *
  9. * Redistribution and use in source and binary forms, with or without
  10. * modification, are permitted provided that the following conditions
  11. * are met:
  12. *
  13. * 1. Redistributions of source code must retain the above copyright
  14. * notice, this list of conditions and the following disclaimer.
  15. * 2. Redistributions in binary form must reproduce the above copyright
  16. * notice, this list of conditions and the following disclaimer in the
  17. * documentation and/or other materials provided with the distribution.
  18. *
  19. * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
  20. * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  21. * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  22. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
  23. * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  24. * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  25. * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  26. * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  27. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  28. * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  29. *************************************************************************/
  30. #ifndef OPENCV_FLANN_KMEANS_INDEX_H_
  31. #define OPENCV_FLANN_KMEANS_INDEX_H_
  32. //! @cond IGNORED
  33. #include <algorithm>
  34. #include <map>
  35. #include <limits>
  36. #include <cmath>
  37. #include "general.h"
  38. #include "nn_index.h"
  39. #include "dist.h"
  40. #include "matrix.h"
  41. #include "result_set.h"
  42. #include "heap.h"
  43. #include "allocator.h"
  44. #include "random.h"
  45. #include "saving.h"
  46. #include "logger.h"
  47. #define BITS_PER_CHAR 8
  48. #define BITS_PER_BASE 2 // for DNA/RNA sequences
  49. #define BASE_PER_CHAR (BITS_PER_CHAR/BITS_PER_BASE)
  50. #define HISTOS_PER_BASE (1<<BITS_PER_BASE)
  51. namespace cvflann
  52. {
  53. struct KMeansIndexParams : public IndexParams
  54. {
  55. void indexParams(int branching, int iterations,
  56. flann_centers_init_t centers_init, float cb_index, int trees)
  57. {
  58. (*this)["algorithm"] = FLANN_INDEX_KMEANS;
  59. // branching factor
  60. (*this)["branching"] = branching;
  61. // max iterations to perform in one kmeans clustering (kmeans tree)
  62. (*this)["iterations"] = iterations;
  63. // algorithm used for picking the initial cluster centers for kmeans tree
  64. (*this)["centers_init"] = centers_init;
  65. // cluster boundary index. Used when searching the kmeans tree
  66. (*this)["cb_index"] = cb_index;
  67. // number of kmeans trees to search in
  68. (*this)["trees"] = trees;
  69. }
  70. KMeansIndexParams(int branching = 32, int iterations = 11,
  71. flann_centers_init_t centers_init = FLANN_CENTERS_RANDOM, float cb_index = 0.2 )
  72. {
  73. indexParams(branching, iterations, centers_init, cb_index, 1);
  74. }
  75. KMeansIndexParams(int branching, int iterations,
  76. flann_centers_init_t centers_init, float cb_index, int trees)
  77. {
  78. indexParams(branching, iterations, centers_init, cb_index, trees);
  79. }
  80. };
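/*
 * Illustrative usage sketch (not part of the original header), showing how these
 * parameters typically feed a KMeansIndex build. The dataset pointer, its
 * dimensions and the L2<float> distance below are placeholder assumptions.
 *
 *     float* data_ptr = ...;  // row-major feature data (assumed to exist)
 *     cvflann::Matrix<float> data(data_ptr, 1000, 128);
 *     cvflann::KMeansIndexParams params(32, 11, cvflann::FLANN_CENTERS_KMEANSPP, 0.2f);
 *     cvflann::KMeansIndex<cvflann::L2<float> > index(data, params);
 *     index.buildIndex();
 */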
  81. /**
  82. * Hierarchical kmeans index
  83. *
  84. * Contains a tree constructed through a hierarchical kmeans clustering
  85. * and other information for indexing a set of points for nearest-neighbour matching.
  86. */
  87. template <typename Distance>
  88. class KMeansIndex : public NNIndex<Distance>
  89. {
  90. public:
  91. typedef typename Distance::ElementType ElementType;
  92. typedef typename Distance::ResultType DistanceType;
  93. typedef typename Distance::CentersType CentersType;
  94. typedef typename Distance::is_kdtree_distance is_kdtree_distance;
  95. typedef typename Distance::is_vector_space_distance is_vector_space_distance;
  96. typedef void (KMeansIndex::* centersAlgFunction)(int, int*, int, int*, int&);
  97. /**
  98. * The function used for choosing the cluster centers.
  99. */
  100. centersAlgFunction chooseCenters;
  101. /**
  102. * Chooses the initial centers in the k-means clustering in a random manner.
  103. *
  104. * Params:
  105. * k = number of centers
  106. * vecs = the dataset of points
  107. * indices = indices in the dataset
  108. * indices_length = length of indices vector
  109. *
  110. */
  111. void chooseCentersRandom(int k, int* indices, int indices_length, int* centers, int& centers_length)
  112. {
  113. UniqueRandom r(indices_length);
  114. int index;
  115. for (index=0; index<k; ++index) {
  116. bool duplicate = true;
  117. int rnd;
  118. while (duplicate) {
  119. duplicate = false;
  120. rnd = r.next();
  121. if (rnd<0) {
  122. centers_length = index;
  123. return;
  124. }
  125. centers[index] = indices[rnd];
  126. for (int j=0; j<index; ++j) {
  127. DistanceType sq = distance_(dataset_[centers[index]], dataset_[centers[j]], dataset_.cols);
  128. if (sq<1e-16) {
  129. duplicate = true;
  130. }
  131. }
  132. }
  133. }
  134. centers_length = index;
  135. }
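// Note: a candidate center whose squared distance to an already chosen center is
// below 1e-16 is treated as a duplicate and redrawn. If the random sequence runs
// out first (r.next() < 0), centers_length is returned smaller than k, and
// computeClustering() then keeps the node as a leaf.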
  136. /**
  137. * Chooses the initial centers in the k-means using Gonzales' algorithm
  138. * so that the centers are spaced apart from each other.
  139. *
  140. * Params:
  141. * k = number of centers
  142. * vecs = the dataset of points
  143. * indices = indices in the dataset
  144. * Returns:
  145. */
  146. void chooseCentersGonzales(int k, int* indices, int indices_length, int* centers, int& centers_length)
  147. {
  148. int n = indices_length;
  149. int rnd = rand_int(n);
  150. CV_DbgAssert(rnd >=0 && rnd < n);
  151. centers[0] = indices[rnd];
  152. int index;
  153. for (index=1; index<k; ++index) {
  154. int best_index = -1;
  155. DistanceType best_val = 0;
  156. for (int j=0; j<n; ++j) {
  157. DistanceType dist = distance_(dataset_[centers[0]],dataset_[indices[j]],dataset_.cols);
  158. for (int i=1; i<index; ++i) {
  159. DistanceType tmp_dist = distance_(dataset_[centers[i]],dataset_[indices[j]],dataset_.cols);
  160. if (tmp_dist<dist) {
  161. dist = tmp_dist;
  162. }
  163. }
  164. if (dist>best_val) {
  165. best_val = dist;
  166. best_index = j;
  167. }
  168. }
  169. if (best_index!=-1) {
  170. centers[index] = indices[best_index];
  171. }
  172. else {
  173. break;
  174. }
  175. }
  176. centers_length = index;
  177. }
  178. /**
  179. * Chooses the initial centers in the k-means using the algorithm
  180. * proposed in the KMeans++ paper:
  181. * Arthur, David; Vassilvitskii, Sergei - k-means++: The Advantages of Careful Seeding
  182. *
  183. * Implementation of this function was converted from the one provided in Arthur's code.
  184. *
  185. * Params:
  186. * k = number of centers
  187. * vecs = the dataset of points
  188. * indices = indices in the dataset
  189. * Returns:
  190. */
  191. void chooseCentersKMeanspp(int k, int* indices, int indices_length, int* centers, int& centers_length)
  192. {
  193. int n = indices_length;
  194. double currentPot = 0;
  195. DistanceType* closestDistSq = new DistanceType[n];
  196. // Choose one random center and set the closestDistSq values
  197. int index = rand_int(n);
  198. CV_DbgAssert(index >=0 && index < n);
  199. centers[0] = indices[index];
  200. for (int i = 0; i < n; i++) {
  201. closestDistSq[i] = distance_(dataset_[indices[i]], dataset_[indices[index]], dataset_.cols);
  202. closestDistSq[i] = ensureSquareDistance<Distance>( closestDistSq[i] );
  203. currentPot += closestDistSq[i];
  204. }
  205. const int numLocalTries = 1;
  206. // Choose each center
  207. int centerCount;
  208. for (centerCount = 1; centerCount < k; centerCount++) {
  209. // Repeat several trials
  210. double bestNewPot = -1;
  211. int bestNewIndex = -1;
  212. for (int localTrial = 0; localTrial < numLocalTries; localTrial++) {
  213. // Choose our center - have to be slightly careful to return a valid answer even accounting
  214. // for possible rounding errors
  215. double randVal = rand_double(currentPot);
  216. for (index = 0; index < n-1; index++) {
  217. if (randVal <= closestDistSq[index]) break;
  218. else randVal -= closestDistSq[index];
  219. }
  220. // Compute the new potential
  221. double newPot = 0;
  222. for (int i = 0; i < n; i++) {
  223. DistanceType dist = distance_(dataset_[indices[i]], dataset_[indices[index]], dataset_.cols);
  224. newPot += std::min( ensureSquareDistance<Distance>(dist), closestDistSq[i] );
  225. }
  226. // Store the best result
  227. if ((bestNewPot < 0)||(newPot < bestNewPot)) {
  228. bestNewPot = newPot;
  229. bestNewIndex = index;
  230. }
  231. }
  232. // Add the appropriate center
  233. centers[centerCount] = indices[bestNewIndex];
  234. currentPot = bestNewPot;
  235. for (int i = 0; i < n; i++) {
  236. DistanceType dist = distance_(dataset_[indices[i]], dataset_[indices[bestNewIndex]], dataset_.cols);
  237. closestDistSq[i] = std::min( ensureSquareDistance<Distance>(dist), closestDistSq[i] );
  238. }
  239. }
  240. centers_length = centerCount;
  241. delete[] closestDistSq;
  242. }
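// Illustrative note on the seeding above: kmeans++ picks the next center with
// probability proportional to the squared distance to the nearest center chosen
// so far, i.e. P(x) = D(x)^2 / sum_y D(y)^2. The code realises this by drawing
// randVal uniformly in [0, currentPot) and walking the prefix sums of
// closestDistSq until the drawn mass is used up.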
  243. public:
  244. flann_algorithm_t getType() const CV_OVERRIDE
  245. {
  246. return FLANN_INDEX_KMEANS;
  247. }
  248. template<class CentersContainerType>
  249. class KMeansDistanceComputer : public cv::ParallelLoopBody
  250. {
  251. public:
  252. KMeansDistanceComputer(Distance _distance, const Matrix<ElementType>& _dataset,
  253. const int _branching, const int* _indices, const CentersContainerType& _dcenters,
  254. const size_t _veclen, std::vector<int> &_new_centroids,
  255. std::vector<DistanceType> &_sq_dists)
  256. : distance(_distance)
  257. , dataset(_dataset)
  258. , branching(_branching)
  259. , indices(_indices)
  260. , dcenters(_dcenters)
  261. , veclen(_veclen)
  262. , new_centroids(_new_centroids)
  263. , sq_dists(_sq_dists)
  264. {
  265. }
  266. void operator()(const cv::Range& range) const CV_OVERRIDE
  267. {
  268. const int begin = range.start;
  269. const int end = range.end;
  270. for( int i = begin; i<end; ++i)
  271. {
  272. DistanceType sq_dist(distance(dataset[indices[i]], dcenters[0], veclen));
  273. int new_centroid(0);
  274. for (int j=1; j<branching; ++j) {
  275. DistanceType new_sq_dist = distance(dataset[indices[i]], dcenters[j], veclen);
  276. if (sq_dist>new_sq_dist) {
  277. new_centroid = j;
  278. sq_dist = new_sq_dist;
  279. }
  280. }
  281. sq_dists[i] = sq_dist;
  282. new_centroids[i] = new_centroid;
  283. }
  284. }
  285. private:
  286. Distance distance;
  287. const Matrix<ElementType>& dataset;
  288. const int branching;
  289. const int* indices;
  290. const CentersContainerType& dcenters;
  291. const size_t veclen;
  292. std::vector<int> &new_centroids;
  293. std::vector<DistanceType> &sq_dists;
  294. KMeansDistanceComputer& operator=( const KMeansDistanceComputer & ) { return *this; }
  295. };
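// Note: this functor is dispatched through cv::parallel_for_ (see the refine*
// methods below). Each loop index writes only its own slot of new_centroids and
// sq_dists, so the parallel workers need no extra synchronization.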
  296. /**
  297. * Index constructor
  298. *
  299. * Params:
  300. * inputData = dataset with the input features
  301. * params = parameters passed to the hierarchical k-means algorithm
  302. */
  303. KMeansIndex(const Matrix<ElementType>& inputData, const IndexParams& params = KMeansIndexParams(),
  304. Distance d = Distance())
  305. : dataset_(inputData), index_params_(params), root_(NULL), indices_(NULL), distance_(d)
  306. {
  307. memoryCounter_ = 0;
  308. size_ = dataset_.rows;
  309. veclen_ = dataset_.cols;
  310. branching_ = get_param(params,"branching",32);
  311. trees_ = get_param(params,"trees",1);
  312. iterations_ = get_param(params,"iterations",11);
  313. if (iterations_<0) {
  314. iterations_ = (std::numeric_limits<int>::max)();
  315. }
  316. centers_init_ = get_param(params,"centers_init",FLANN_CENTERS_RANDOM);
  317. if (centers_init_==FLANN_CENTERS_RANDOM) {
  318. chooseCenters = &KMeansIndex::chooseCentersRandom;
  319. }
  320. else if (centers_init_==FLANN_CENTERS_GONZALES) {
  321. chooseCenters = &KMeansIndex::chooseCentersGonzales;
  322. }
  323. else if (centers_init_==FLANN_CENTERS_KMEANSPP) {
  324. chooseCenters = &KMeansIndex::chooseCentersKMeanspp;
  325. }
  326. else {
  327. FLANN_THROW(cv::Error::StsBadArg, "Unknown algorithm for choosing initial centers.");
  328. }
  329. cb_index_ = 0.4f;
  330. root_ = new KMeansNodePtr[trees_];
  331. indices_ = new int*[trees_];
  332. for (int i=0; i<trees_; ++i) {
  333. root_[i] = NULL;
  334. indices_[i] = NULL;
  335. }
  336. }
  337. KMeansIndex(const KMeansIndex&);
  338. KMeansIndex& operator=(const KMeansIndex&);
  339. /**
  340. * Index destructor.
  341. *
  342. * Release the memory used by the index.
  343. */
  344. virtual ~KMeansIndex()
  345. {
  346. if (root_ != NULL) {
  347. free_centers();
  348. delete[] root_;
  349. }
  350. if (indices_!=NULL) {
  351. free_indices();
  352. delete[] indices_;
  353. }
  354. }
  355. /**
  356. * Returns size of index.
  357. */
  358. size_t size() const CV_OVERRIDE
  359. {
  360. return size_;
  361. }
  362. /**
  363. * Returns the length of an index feature.
  364. */
  365. size_t veclen() const CV_OVERRIDE
  366. {
  367. return veclen_;
  368. }
  369. void set_cb_index( float index)
  370. {
  371. cb_index_ = index;
  372. }
  373. /**
374. * Computes the index memory usage
  375. * Returns: memory used by the index
  376. */
  377. int usedMemory() const CV_OVERRIDE
  378. {
  379. return pool_.usedMemory+pool_.wastedMemory+memoryCounter_;
  380. }
  381. /**
  382. * Builds the index
  383. */
  384. void buildIndex() CV_OVERRIDE
  385. {
  386. if (branching_<2) {
  387. FLANN_THROW(cv::Error::StsError, "Branching factor must be at least 2");
  388. }
  389. free_indices();
  390. for (int i=0; i<trees_; ++i) {
  391. indices_[i] = new int[size_];
  392. for (size_t j=0; j<size_; ++j) {
  393. indices_[i][j] = int(j);
  394. }
  395. root_[i] = pool_.allocate<KMeansNode>();
  396. std::memset(root_[i], 0, sizeof(KMeansNode));
  397. Distance* dummy = NULL;
  398. computeNodeStatistics(root_[i], indices_[i], (unsigned int)size_, dummy);
  399. computeClustering(root_[i], indices_[i], (int)size_, branching_,0);
  400. }
  401. }
  402. void saveIndex(FILE* stream) CV_OVERRIDE
  403. {
  404. save_value(stream, branching_);
  405. save_value(stream, iterations_);
  406. save_value(stream, memoryCounter_);
  407. save_value(stream, cb_index_);
  408. save_value(stream, trees_);
  409. for (int i=0; i<trees_; ++i) {
  410. save_value(stream, *indices_[i], (int)size_);
  411. save_tree(stream, root_[i], i);
  412. }
  413. }
  414. void loadIndex(FILE* stream) CV_OVERRIDE
  415. {
  416. if (indices_!=NULL) {
  417. free_indices();
  418. delete[] indices_;
  419. }
  420. if (root_!=NULL) {
  421. free_centers();
  422. }
  423. load_value(stream, branching_);
  424. load_value(stream, iterations_);
  425. load_value(stream, memoryCounter_);
  426. load_value(stream, cb_index_);
  427. load_value(stream, trees_);
  428. indices_ = new int*[trees_];
  429. for (int i=0; i<trees_; ++i) {
  430. indices_[i] = new int[size_];
  431. load_value(stream, *indices_[i], size_);
  432. load_tree(stream, root_[i], i);
  433. }
  434. index_params_["algorithm"] = getType();
  435. index_params_["branching"] = branching_;
  436. index_params_["trees"] = trees_;
  437. index_params_["iterations"] = iterations_;
  438. index_params_["centers_init"] = centers_init_;
  439. index_params_["cb_index"] = cb_index_;
  440. }
  441. /**
  442. * Find set of nearest neighbors to vec. Their indices are stored inside
  443. * the result object.
  444. *
  445. * Params:
  446. * result = the result object in which the indices of the nearest-neighbors are stored
  447. * vec = the vector for which to search the nearest neighbors
  448. * searchParams = parameters that influence the search algorithm (checks, cb_index)
  449. */
  450. void findNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, const SearchParams& searchParams) CV_OVERRIDE
  451. {
  452. const int maxChecks = get_param(searchParams,"checks",32);
  453. if (maxChecks==FLANN_CHECKS_UNLIMITED) {
  454. findExactNN(root_[0], result, vec);
  455. }
  456. else {
  457. // Priority queue storing intermediate branches in the best-bin-first search
  458. Heap<BranchSt>* heap = new Heap<BranchSt>((int)size_);
  459. int checks = 0;
  460. for (int i=0; i<trees_; ++i) {
  461. findNN(root_[i], result, vec, checks, maxChecks, heap);
  462. if ((checks >= maxChecks) && result.full())
  463. break;
  464. }
  465. BranchSt branch;
  466. while (heap->popMin(branch) && (checks<maxChecks || !result.full())) {
  467. KMeansNodePtr node = branch.node;
  468. findNN(node, result, vec, checks, maxChecks, heap);
  469. }
  470. delete heap;
  471. CV_Assert(result.full());
  472. }
  473. }
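/*
 * Illustrative search sketch (assumed caller-side code, not part of this header):
 * a k-nearest-neighbour query against a built index with a budget of 32 checks.
 * The index object, query pointer and float element type are placeholders.
 *
 *     int knn = 4;
 *     cv::AutoBuffer<int> nn_indices(knn);
 *     cv::AutoBuffer<float> nn_dists(knn);           // DistanceType of L2<float>
 *     cvflann::KNNResultSet<float> resultSet(knn);
 *     resultSet.init(nn_indices.data(), nn_dists.data());
 *     index.findNeighbors(resultSet, query_ptr, cvflann::SearchParams(32));
 */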
  474. /**
475. * Clustering function that takes a cut in the hierarchical k-means
476. * tree and returns the cluster centers of that clustering.
  477. * Params:
  478. * numClusters = number of clusters to have in the clustering computed
  479. * Returns: number of cluster centers
  480. */
  481. int getClusterCenters(Matrix<CentersType>& centers)
  482. {
  483. int numClusters = centers.rows;
  484. if (numClusters<1) {
  485. FLANN_THROW(cv::Error::StsBadArg, "Number of clusters must be at least 1");
  486. }
  487. DistanceType variance;
  488. KMeansNodePtr* clusters = new KMeansNodePtr[numClusters];
  489. int clusterCount = getMinVarianceClusters(root_[0], clusters, numClusters, variance);
  490. Logger::info("Clusters requested: %d, returning %d\n",numClusters, clusterCount);
  491. for (int i=0; i<clusterCount; ++i) {
  492. CentersType* center = clusters[i]->pivot;
  493. for (size_t j=0; j<veclen_; ++j) {
  494. centers[i][j] = center[j];
  495. }
  496. }
  497. delete[] clusters;
  498. return clusterCount;
  499. }
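/*
 * Illustrative sketch for getClusterCenters (caller-side, assumed): request a cut
 * of the tree with up to 10 clusters; fewer centers may be returned, as logged
 * above. The float centers type is an assumption tied to L2<float>.
 *
 *     int requested = 10;
 *     std::vector<float> buf(requested * index.veclen());
 *     cvflann::Matrix<float> centers(buf.data(), requested, index.veclen());
 *     int found = index.getClusterCenters(centers);  // found <= requested
 */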
  500. IndexParams getParameters() const CV_OVERRIDE
  501. {
  502. return index_params_;
  503. }
  504. private:
  505. /**
  506. * Structure representing a node in the hierarchical k-means tree.
  507. */
  508. struct KMeansNode
  509. {
  510. /**
  511. * The cluster center.
  512. */
  513. CentersType* pivot;
  514. /**
  515. * The cluster radius.
  516. */
  517. DistanceType radius;
  518. /**
  519. * The cluster mean radius.
  520. */
  521. DistanceType mean_radius;
  522. /**
  523. * The cluster variance.
  524. */
  525. DistanceType variance;
  526. /**
  527. * The cluster size (number of points in the cluster)
  528. */
  529. int size;
  530. /**
  531. * Child nodes (only for non-terminal nodes)
  532. */
  533. KMeansNode** childs;
  534. /**
  535. * Node points (only for terminal nodes)
  536. */
  537. int* indices;
  538. /**
  539. * Level
  540. */
  541. int level;
  542. };
  543. typedef KMeansNode* KMeansNodePtr;
  544. /**
  545. * Alias definition for a nicer syntax.
  546. */
  547. typedef BranchStruct<KMeansNodePtr, DistanceType> BranchSt;
  548. void save_tree(FILE* stream, KMeansNodePtr node, int num)
  549. {
  550. save_value(stream, *node);
  551. save_value(stream, *(node->pivot), (int)veclen_);
  552. if (node->childs==NULL) {
  553. int indices_offset = (int)(node->indices - indices_[num]);
  554. save_value(stream, indices_offset);
  555. }
  556. else {
  557. for(int i=0; i<branching_; ++i) {
  558. save_tree(stream, node->childs[i], num);
  559. }
  560. }
  561. }
  562. void load_tree(FILE* stream, KMeansNodePtr& node, int num)
  563. {
  564. node = pool_.allocate<KMeansNode>();
  565. load_value(stream, *node);
  566. node->pivot = new CentersType[veclen_];
  567. load_value(stream, *(node->pivot), (int)veclen_);
  568. if (node->childs==NULL) {
  569. int indices_offset;
  570. load_value(stream, indices_offset);
  571. node->indices = indices_[num] + indices_offset;
  572. }
  573. else {
  574. node->childs = pool_.allocate<KMeansNodePtr>(branching_);
  575. for(int i=0; i<branching_; ++i) {
  576. load_tree(stream, node->childs[i], num);
  577. }
  578. }
  579. }
  580. /**
  581. * Helper function
  582. */
  583. void free_centers(KMeansNodePtr node)
  584. {
  585. delete[] node->pivot;
  586. if (node->childs!=NULL) {
  587. for (int k=0; k<branching_; ++k) {
  588. free_centers(node->childs[k]);
  589. }
  590. }
  591. }
  592. void free_centers()
  593. {
  594. if (root_ != NULL) {
  595. for(int i=0; i<trees_; ++i) {
  596. if (root_[i] != NULL) {
  597. free_centers(root_[i]);
  598. }
  599. }
  600. }
  601. }
  602. /**
  603. * Release the inner elements of indices[]
  604. */
  605. void free_indices()
  606. {
  607. if (indices_!=NULL) {
  608. for(int i=0; i<trees_; ++i) {
  609. if (indices_[i]!=NULL) {
  610. delete[] indices_[i];
  611. indices_[i] = NULL;
  612. }
  613. }
  614. }
  615. }
  616. /**
  617. * Computes the statistics of a node (mean, radius, variance).
  618. *
  619. * Params:
  620. * node = the node to use
  621. * indices = array of indices of the points belonging to the node
  622. * indices_length = number of indices in the array
  623. */
  624. void computeNodeStatistics(KMeansNodePtr node, int* indices, unsigned int indices_length)
  625. {
  626. DistanceType variance = 0;
  627. CentersType* mean = new CentersType[veclen_];
  628. memoryCounter_ += int(veclen_*sizeof(CentersType));
  629. memset(mean,0,veclen_*sizeof(CentersType));
  630. for (unsigned int i=0; i<indices_length; ++i) {
  631. ElementType* vec = dataset_[indices[i]];
  632. for (size_t j=0; j<veclen_; ++j) {
  633. mean[j] += vec[j];
  634. }
  635. variance += distance_(vec, ZeroIterator<ElementType>(), veclen_);
  636. }
  637. float length = static_cast<float>(indices_length);
  638. for (size_t j=0; j<veclen_; ++j) {
  639. mean[j] = cvflann::round<CentersType>( mean[j] / static_cast<double>(indices_length) );
  640. }
  641. variance /= static_cast<DistanceType>( length );
  642. variance -= distance_(mean, ZeroIterator<ElementType>(), veclen_);
  643. DistanceType radius = 0;
  644. for (unsigned int i=0; i<indices_length; ++i) {
  645. DistanceType tmp = distance_(mean, dataset_[indices[i]], veclen_);
  646. if (tmp>radius) {
  647. radius = tmp;
  648. }
  649. }
  650. node->variance = variance;
  651. node->radius = radius;
  652. node->pivot = mean;
  653. }
  654. void computeBitfieldNodeStatistics(KMeansNodePtr node, int* indices,
  655. unsigned int indices_length)
  656. {
  657. const unsigned int accumulator_veclen = static_cast<unsigned int>(
  658. veclen_*sizeof(CentersType)*BITS_PER_CHAR);
  659. unsigned long long variance = 0ull;
  660. CentersType* mean = new CentersType[veclen_];
  661. memoryCounter_ += int(veclen_*sizeof(CentersType));
  662. unsigned int* mean_accumulator = new unsigned int[accumulator_veclen];
  663. memset(mean_accumulator, 0, sizeof(unsigned int)*accumulator_veclen);
  664. for (unsigned int i=0; i<indices_length; ++i) {
  665. variance += static_cast<unsigned long long>( ensureSquareDistance<Distance>(
  666. distance_(dataset_[indices[i]], ZeroIterator<ElementType>(), veclen_)));
  667. unsigned char* vec = (unsigned char*)dataset_[indices[i]];
  668. for (size_t k=0, l=0; k<accumulator_veclen; k+=BITS_PER_CHAR, ++l) {
  669. mean_accumulator[k] += (vec[l]) & 0x01;
  670. mean_accumulator[k+1] += (vec[l]>>1) & 0x01;
  671. mean_accumulator[k+2] += (vec[l]>>2) & 0x01;
  672. mean_accumulator[k+3] += (vec[l]>>3) & 0x01;
  673. mean_accumulator[k+4] += (vec[l]>>4) & 0x01;
  674. mean_accumulator[k+5] += (vec[l]>>5) & 0x01;
  675. mean_accumulator[k+6] += (vec[l]>>6) & 0x01;
  676. mean_accumulator[k+7] += (vec[l]>>7) & 0x01;
  677. }
  678. }
  679. double cnt = static_cast<double>(indices_length);
  680. unsigned char* char_mean = (unsigned char*)mean;
  681. for (size_t k=0, l=0; k<accumulator_veclen; k+=BITS_PER_CHAR, ++l) {
  682. char_mean[l] = static_cast<unsigned char>(
  683. (((int)(0.5 + (double)(mean_accumulator[k]) / cnt)))
  684. | (((int)(0.5 + (double)(mean_accumulator[k+1]) / cnt))<<1)
  685. | (((int)(0.5 + (double)(mean_accumulator[k+2]) / cnt))<<2)
  686. | (((int)(0.5 + (double)(mean_accumulator[k+3]) / cnt))<<3)
  687. | (((int)(0.5 + (double)(mean_accumulator[k+4]) / cnt))<<4)
  688. | (((int)(0.5 + (double)(mean_accumulator[k+5]) / cnt))<<5)
  689. | (((int)(0.5 + (double)(mean_accumulator[k+6]) / cnt))<<6)
  690. | (((int)(0.5 + (double)(mean_accumulator[k+7]) / cnt))<<7));
  691. }
  692. variance = static_cast<unsigned long long>(
  693. 0.5 + static_cast<double>(variance) / static_cast<double>(indices_length));
  694. variance -= static_cast<unsigned long long>(
  695. ensureSquareDistance<Distance>(
  696. distance_(mean, ZeroIterator<ElementType>(), veclen_)));
  697. DistanceType radius = 0;
  698. for (unsigned int i=0; i<indices_length; ++i) {
  699. DistanceType tmp = distance_(mean, dataset_[indices[i]], veclen_);
  700. if (tmp>radius) {
  701. radius = tmp;
  702. }
  703. }
  704. node->variance = static_cast<DistanceType>(variance);
  705. node->radius = radius;
  706. node->pivot = mean;
  707. delete[] mean_accumulator;
  708. }
  709. void computeDnaNodeStatistics(KMeansNodePtr node, int* indices,
  710. unsigned int indices_length)
  711. {
  712. const unsigned int histos_veclen = static_cast<unsigned int>(
  713. veclen_*sizeof(CentersType)*(HISTOS_PER_BASE*BASE_PER_CHAR));
  714. unsigned long long variance = 0ull;
  715. unsigned int* histograms = new unsigned int[histos_veclen];
  716. memset(histograms, 0, sizeof(unsigned int)*histos_veclen);
  717. for (unsigned int i=0; i<indices_length; ++i) {
  718. variance += static_cast<unsigned long long>( ensureSquareDistance<Distance>(
  719. distance_(dataset_[indices[i]], ZeroIterator<ElementType>(), veclen_)));
  720. unsigned char* vec = (unsigned char*)dataset_[indices[i]];
  721. for (size_t k=0, l=0; k<histos_veclen; k+=HISTOS_PER_BASE*BASE_PER_CHAR, ++l) {
  722. histograms[k + ((vec[l]) & 0x03)]++;
  723. histograms[k + 4 + ((vec[l]>>2) & 0x03)]++;
  724. histograms[k + 8 + ((vec[l]>>4) & 0x03)]++;
  725. histograms[k +12 + ((vec[l]>>6) & 0x03)]++;
  726. }
  727. }
  728. CentersType* mean = new CentersType[veclen_];
  729. memoryCounter_ += int(veclen_*sizeof(CentersType));
  730. unsigned char* char_mean = (unsigned char*)mean;
  731. unsigned int* h = histograms;
  732. for (size_t k=0, l=0; k<histos_veclen; k+=HISTOS_PER_BASE*BASE_PER_CHAR, ++l) {
  733. char_mean[l] = (h[k] > h[k+1] ? h[k+2] > h[k+3] ? h[k] > h[k+2] ? 0x00 : 0x10
  734. : h[k] > h[k+3] ? 0x00 : 0x11
  735. : h[k+2] > h[k+3] ? h[k+1] > h[k+2] ? 0x01 : 0x10
  736. : h[k+1] > h[k+3] ? 0x01 : 0x11)
  737. | (h[k+4]>h[k+5] ? h[k+6] > h[k+7] ? h[k+4] > h[k+6] ? 0x00 : 0x1000
  738. : h[k+4] > h[k+7] ? 0x00 : 0x1100
  739. : h[k+6] > h[k+7] ? h[k+5] > h[k+6] ? 0x0100 : 0x1000
  740. : h[k+5] > h[k+7] ? 0x0100 : 0x1100)
  741. | (h[k+8]>h[k+9] ? h[k+10]>h[k+11] ? h[k+8] >h[k+10] ? 0x00 : 0x100000
  742. : h[k+8] >h[k+11] ? 0x00 : 0x110000
  743. : h[k+10]>h[k+11] ? h[k+9] >h[k+10] ? 0x010000 : 0x100000
  744. : h[k+9] >h[k+11] ? 0x010000 : 0x110000)
  745. | (h[k+12]>h[k+13] ? h[k+14]>h[k+15] ? h[k+12] >h[k+14] ? 0x00 : 0x10000000
  746. : h[k+12] >h[k+15] ? 0x00 : 0x11000000
  747. : h[k+14]>h[k+15] ? h[k+13] >h[k+14] ? 0x01000000 : 0x10000000
  748. : h[k+13] >h[k+15] ? 0x01000000 : 0x11000000);
  749. }
  750. variance = static_cast<unsigned long long>(
  751. 0.5 + static_cast<double>(variance) / static_cast<double>(indices_length));
  752. variance -= static_cast<unsigned long long>(
  753. ensureSquareDistance<Distance>(
  754. distance_(mean, ZeroIterator<ElementType>(), veclen_)));
  755. DistanceType radius = 0;
  756. for (unsigned int i=0; i<indices_length; ++i) {
  757. DistanceType tmp = distance_(mean, dataset_[indices[i]], veclen_);
  758. if (tmp>radius) {
  759. radius = tmp;
  760. }
  761. }
  762. node->variance = static_cast<DistanceType>(variance);
  763. node->radius = radius;
  764. node->pivot = mean;
  765. delete[] histograms;
  766. }
  767. template<typename DistType>
  768. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  769. unsigned int indices_length,
  770. const DistType* identifier)
  771. {
  772. (void)identifier;
  773. computeNodeStatistics(node, indices, indices_length);
  774. }
  775. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  776. unsigned int indices_length,
  777. const cvflann::HammingLUT* identifier)
  778. {
  779. (void)identifier;
  780. computeBitfieldNodeStatistics(node, indices, indices_length);
  781. }
  782. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  783. unsigned int indices_length,
  784. const cvflann::Hamming<unsigned char>* identifier)
  785. {
  786. (void)identifier;
  787. computeBitfieldNodeStatistics(node, indices, indices_length);
  788. }
  789. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  790. unsigned int indices_length,
  791. const cvflann::Hamming2<unsigned char>* identifier)
  792. {
  793. (void)identifier;
  794. computeBitfieldNodeStatistics(node, indices, indices_length);
  795. }
  796. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  797. unsigned int indices_length,
  798. const cvflann::DNAmmingLUT* identifier)
  799. {
  800. (void)identifier;
  801. computeDnaNodeStatistics(node, indices, indices_length);
  802. }
  803. void computeNodeStatistics(KMeansNodePtr node, int* indices,
  804. unsigned int indices_length,
  805. const cvflann::DNAmming2<unsigned char>* identifier)
  806. {
  807. (void)identifier;
  808. computeDnaNodeStatistics(node, indices, indices_length);
  809. }
  810. void refineClustering(int* indices, int indices_length, int branching, CentersType** centers,
  811. std::vector<DistanceType>& radiuses, int* belongs_to, int* count)
  812. {
  813. cv::AutoBuffer<double> dcenters_buf(branching*veclen_);
  814. Matrix<double> dcenters(dcenters_buf.data(), branching, veclen_);
  815. bool converged = false;
  816. int iteration = 0;
  817. while (!converged && iteration<iterations_) {
  818. converged = true;
  819. iteration++;
  820. // compute the new cluster centers
  821. for (int i=0; i<branching; ++i) {
  822. memset(dcenters[i],0,sizeof(double)*veclen_);
  823. radiuses[i] = 0;
  824. }
  825. for (int i=0; i<indices_length; ++i) {
  826. ElementType* vec = dataset_[indices[i]];
  827. double* center = dcenters[belongs_to[i]];
  828. for (size_t k=0; k<veclen_; ++k) {
  829. center[k] += vec[k];
  830. }
  831. }
  832. for (int i=0; i<branching; ++i) {
  833. int cnt = count[i];
  834. for (size_t k=0; k<veclen_; ++k) {
  835. dcenters[i][k] /= cnt;
  836. }
  837. }
  838. std::vector<int> new_centroids(indices_length);
  839. std::vector<DistanceType> sq_dists(indices_length);
  840. // reassign points to clusters
  841. KMeansDistanceComputer<Matrix<double> > invoker(
  842. distance_, dataset_, branching, indices, dcenters, veclen_, new_centroids, sq_dists);
  843. parallel_for_(cv::Range(0, (int)indices_length), invoker);
  844. for (int i=0; i < (int)indices_length; ++i) {
  845. DistanceType sq_dist(sq_dists[i]);
  846. int new_centroid(new_centroids[i]);
  847. if (sq_dist > radiuses[new_centroid]) {
  848. radiuses[new_centroid] = sq_dist;
  849. }
  850. if (new_centroid != belongs_to[i]) {
  851. count[belongs_to[i]]--;
  852. count[new_centroid]++;
  853. belongs_to[i] = new_centroid;
  854. converged = false;
  855. }
  856. }
  857. for (int i=0; i<branching; ++i) {
  858. // if one cluster converges to an empty cluster,
  859. // move an element into that cluster
  860. if (count[i]==0) {
  861. int j = (i+1)%branching;
  862. while (count[j]<=1) {
  863. j = (j+1)%branching;
  864. }
  865. for (int k=0; k<indices_length; ++k) {
  866. if (belongs_to[k]==j) {
  867. // for cluster j, we move the furthest element from the center to the empty cluster i
  868. if ( distance_(dataset_[indices[k]], dcenters[j], veclen_) == radiuses[j] ) {
  869. belongs_to[k] = i;
  870. count[j]--;
  871. count[i]++;
  872. break;
  873. }
  874. }
  875. }
  876. converged = false;
  877. }
  878. }
  879. }
  880. for (int i=0; i<branching; ++i) {
  881. centers[i] = new CentersType[veclen_];
  882. memoryCounter_ += (int)(veclen_*sizeof(CentersType));
  883. for (size_t k=0; k<veclen_; ++k) {
  884. centers[i][k] = (CentersType)dcenters[i][k];
  885. }
  886. }
  887. }
  888. void refineBitfieldClustering(int* indices, int indices_length, int branching, CentersType** centers,
  889. std::vector<DistanceType>& radiuses, int* belongs_to, int* count)
  890. {
  891. for (int i=0; i<branching; ++i) {
  892. centers[i] = new CentersType[veclen_];
  893. memoryCounter_ += (int)(veclen_*sizeof(CentersType));
  894. }
  895. const unsigned int accumulator_veclen = static_cast<unsigned int>(
  896. veclen_*sizeof(ElementType)*BITS_PER_CHAR);
  897. cv::AutoBuffer<unsigned int> dcenters_buf(branching*accumulator_veclen);
  898. Matrix<unsigned int> dcenters(dcenters_buf.data(), branching, accumulator_veclen);
  899. bool converged = false;
  900. int iteration = 0;
  901. while (!converged && iteration<iterations_) {
  902. converged = true;
  903. iteration++;
  904. // compute the new cluster centers
  905. for (int i=0; i<branching; ++i) {
  906. memset(dcenters[i],0,sizeof(unsigned int)*accumulator_veclen);
  907. radiuses[i] = 0;
  908. }
  909. for (int i=0; i<indices_length; ++i) {
  910. unsigned char* vec = (unsigned char*)dataset_[indices[i]];
  911. unsigned int* dcenter = dcenters[belongs_to[i]];
  912. for (size_t k=0, l=0; k<accumulator_veclen; k+=BITS_PER_CHAR, ++l) {
  913. dcenter[k] += (vec[l]) & 0x01;
  914. dcenter[k+1] += (vec[l]>>1) & 0x01;
  915. dcenter[k+2] += (vec[l]>>2) & 0x01;
  916. dcenter[k+3] += (vec[l]>>3) & 0x01;
  917. dcenter[k+4] += (vec[l]>>4) & 0x01;
  918. dcenter[k+5] += (vec[l]>>5) & 0x01;
  919. dcenter[k+6] += (vec[l]>>6) & 0x01;
  920. dcenter[k+7] += (vec[l]>>7) & 0x01;
  921. }
  922. }
  923. for (int i=0; i<branching; ++i) {
  924. double cnt = static_cast<double>(count[i]);
  925. unsigned int* dcenter = dcenters[i];
  926. unsigned char* charCenter = (unsigned char*)centers[i];
  927. for (size_t k=0, l=0; k<accumulator_veclen; k+=BITS_PER_CHAR, ++l) {
  928. charCenter[l] = static_cast<unsigned char>(
  929. (((int)(0.5 + (double)(dcenter[k]) / cnt)))
  930. | (((int)(0.5 + (double)(dcenter[k+1]) / cnt))<<1)
  931. | (((int)(0.5 + (double)(dcenter[k+2]) / cnt))<<2)
  932. | (((int)(0.5 + (double)(dcenter[k+3]) / cnt))<<3)
  933. | (((int)(0.5 + (double)(dcenter[k+4]) / cnt))<<4)
  934. | (((int)(0.5 + (double)(dcenter[k+5]) / cnt))<<5)
  935. | (((int)(0.5 + (double)(dcenter[k+6]) / cnt))<<6)
  936. | (((int)(0.5 + (double)(dcenter[k+7]) / cnt))<<7));
  937. }
  938. }
  939. std::vector<int> new_centroids(indices_length);
  940. std::vector<DistanceType> dists(indices_length);
  941. // reassign points to clusters
  942. KMeansDistanceComputer<ElementType**> invoker(
  943. distance_, dataset_, branching, indices, centers, veclen_, new_centroids, dists);
  944. parallel_for_(cv::Range(0, (int)indices_length), invoker);
  945. for (int i=0; i < indices_length; ++i) {
  946. DistanceType dist(dists[i]);
  947. int new_centroid(new_centroids[i]);
  948. if (dist > radiuses[new_centroid]) {
  949. radiuses[new_centroid] = dist;
  950. }
  951. if (new_centroid != belongs_to[i]) {
  952. count[belongs_to[i]]--;
  953. count[new_centroid]++;
  954. belongs_to[i] = new_centroid;
  955. converged = false;
  956. }
  957. }
  958. for (int i=0; i<branching; ++i) {
  959. // if one cluster converges to an empty cluster,
  960. // move an element into that cluster
  961. if (count[i]==0) {
  962. int j = (i+1)%branching;
  963. while (count[j]<=1) {
  964. j = (j+1)%branching;
  965. }
  966. for (int k=0; k<indices_length; ++k) {
  967. if (belongs_to[k]==j) {
  968. // for cluster j, we move the furthest element from the center to the empty cluster i
  969. if ( distance_(dataset_[indices[k]], centers[j], veclen_) == radiuses[j] ) {
  970. belongs_to[k] = i;
  971. count[j]--;
  972. count[i]++;
  973. break;
  974. }
  975. }
  976. }
  977. converged = false;
  978. }
  979. }
  980. }
  981. }
  982. void refineDnaClustering(int* indices, int indices_length, int branching, CentersType** centers,
  983. std::vector<DistanceType>& radiuses, int* belongs_to, int* count)
  984. {
  985. for (int i=0; i<branching; ++i) {
  986. centers[i] = new CentersType[veclen_];
  987. memoryCounter_ += (int)(veclen_*sizeof(CentersType));
  988. }
  989. const unsigned int histos_veclen = static_cast<unsigned int>(
  990. veclen_*sizeof(CentersType)*(HISTOS_PER_BASE*BASE_PER_CHAR));
  991. cv::AutoBuffer<unsigned int> histos_buf(branching*histos_veclen);
  992. Matrix<unsigned int> histos(histos_buf.data(), branching, histos_veclen);
  993. bool converged = false;
  994. int iteration = 0;
  995. while (!converged && iteration<iterations_) {
  996. converged = true;
  997. iteration++;
  998. // compute the new cluster centers
  999. for (int i=0; i<branching; ++i) {
  1000. memset(histos[i],0,sizeof(unsigned int)*histos_veclen);
  1001. radiuses[i] = 0;
  1002. }
  1003. for (int i=0; i<indices_length; ++i) {
  1004. unsigned char* vec = (unsigned char*)dataset_[indices[i]];
  1005. unsigned int* h = histos[belongs_to[i]];
  1006. for (size_t k=0, l=0; k<histos_veclen; k+=HISTOS_PER_BASE*BASE_PER_CHAR, ++l) {
  1007. h[k + ((vec[l]) & 0x03)]++;
  1008. h[k + 4 + ((vec[l]>>2) & 0x03)]++;
  1009. h[k + 8 + ((vec[l]>>4) & 0x03)]++;
  1010. h[k +12 + ((vec[l]>>6) & 0x03)]++;
  1011. }
  1012. }
  1013. for (int i=0; i<branching; ++i) {
  1014. unsigned int* h = histos[i];
  1015. unsigned char* charCenter = (unsigned char*)centers[i];
  1016. for (size_t k=0, l=0; k<histos_veclen; k+=HISTOS_PER_BASE*BASE_PER_CHAR, ++l) {
  1017. charCenter[l]= (h[k] > h[k+1] ? h[k+2] > h[k+3] ? h[k] > h[k+2] ? 0x00 : 0x10
  1018. : h[k] > h[k+3] ? 0x00 : 0x11
  1019. : h[k+2] > h[k+3] ? h[k+1] > h[k+2] ? 0x01 : 0x10
  1020. : h[k+1] > h[k+3] ? 0x01 : 0x11)
  1021. | (h[k+4]>h[k+5] ? h[k+6] > h[k+7] ? h[k+4] > h[k+6] ? 0x00 : 0x1000
  1022. : h[k+4] > h[k+7] ? 0x00 : 0x1100
  1023. : h[k+6] > h[k+7] ? h[k+5] > h[k+6] ? 0x0100 : 0x1000
  1024. : h[k+5] > h[k+7] ? 0x0100 : 0x1100)
  1025. | (h[k+8]>h[k+9] ? h[k+10]>h[k+11] ? h[k+8] >h[k+10] ? 0x00 : 0x100000
  1026. : h[k+8] >h[k+11] ? 0x00 : 0x110000
  1027. : h[k+10]>h[k+11] ? h[k+9] >h[k+10] ? 0x010000 : 0x100000
  1028. : h[k+9] >h[k+11] ? 0x010000 : 0x110000)
  1029. | (h[k+12]>h[k+13] ? h[k+14]>h[k+15] ? h[k+12] >h[k+14] ? 0x00 : 0x10000000
  1030. : h[k+12] >h[k+15] ? 0x00 : 0x11000000
  1031. : h[k+14]>h[k+15] ? h[k+13] >h[k+14] ? 0x01000000 : 0x10000000
  1032. : h[k+13] >h[k+15] ? 0x01000000 : 0x11000000);
  1033. }
  1034. }
  1035. std::vector<int> new_centroids(indices_length);
  1036. std::vector<DistanceType> dists(indices_length);
  1037. // reassign points to clusters
  1038. KMeansDistanceComputer<ElementType**> invoker(
  1039. distance_, dataset_, branching, indices, centers, veclen_, new_centroids, dists);
  1040. parallel_for_(cv::Range(0, (int)indices_length), invoker);
  1041. for (int i=0; i < indices_length; ++i) {
  1042. DistanceType dist(dists[i]);
  1043. int new_centroid(new_centroids[i]);
  1044. if (dist > radiuses[new_centroid]) {
  1045. radiuses[new_centroid] = dist;
  1046. }
  1047. if (new_centroid != belongs_to[i]) {
  1048. count[belongs_to[i]]--;
  1049. count[new_centroid]++;
  1050. belongs_to[i] = new_centroid;
  1051. converged = false;
  1052. }
  1053. }
  1054. for (int i=0; i<branching; ++i) {
  1055. // if one cluster converges to an empty cluster,
  1056. // move an element into that cluster
  1057. if (count[i]==0) {
  1058. int j = (i+1)%branching;
  1059. while (count[j]<=1) {
  1060. j = (j+1)%branching;
  1061. }
  1062. for (int k=0; k<indices_length; ++k) {
  1063. if (belongs_to[k]==j) {
  1064. // for cluster j, we move the furthest element from the center to the empty cluster i
  1065. if ( distance_(dataset_[indices[k]], centers[j], veclen_) == radiuses[j] ) {
  1066. belongs_to[k] = i;
  1067. count[j]--;
  1068. count[i]++;
  1069. break;
  1070. }
  1071. }
  1072. }
  1073. converged = false;
  1074. }
  1075. }
  1076. }
  1077. }
  1078. void computeSubClustering(KMeansNodePtr node, int* indices, int indices_length,
  1079. int branching, int level, CentersType** centers,
  1080. std::vector<DistanceType>& radiuses, int* belongs_to, int* count)
  1081. {
  1082. // compute kmeans clustering for each of the resulting clusters
  1083. node->childs = pool_.allocate<KMeansNodePtr>(branching);
  1084. int start = 0;
  1085. int end = start;
  1086. for (int c=0; c<branching; ++c) {
  1087. int s = count[c];
  1088. DistanceType variance = 0;
  1089. DistanceType mean_radius =0;
  1090. for (int i=0; i<indices_length; ++i) {
  1091. if (belongs_to[i]==c) {
  1092. DistanceType d = distance_(dataset_[indices[i]], ZeroIterator<ElementType>(), veclen_);
  1093. variance += d;
  1094. mean_radius += static_cast<DistanceType>( sqrt(d) );
  1095. std::swap(indices[i],indices[end]);
  1096. std::swap(belongs_to[i],belongs_to[end]);
  1097. end++;
  1098. }
  1099. }
  1100. variance /= s;
  1101. mean_radius /= s;
  1102. variance -= distance_(centers[c], ZeroIterator<ElementType>(), veclen_);
  1103. node->childs[c] = pool_.allocate<KMeansNode>();
  1104. std::memset(node->childs[c], 0, sizeof(KMeansNode));
  1105. node->childs[c]->radius = radiuses[c];
  1106. node->childs[c]->pivot = centers[c];
  1107. node->childs[c]->variance = variance;
  1108. node->childs[c]->mean_radius = mean_radius;
  1109. computeClustering(node->childs[c],indices+start, end-start, branching, level+1);
  1110. start=end;
  1111. }
  1112. }
  1113. void computeAnyBitfieldSubClustering(KMeansNodePtr node, int* indices, int indices_length,
  1114. int branching, int level, CentersType** centers,
  1115. std::vector<DistanceType>& radiuses, int* belongs_to, int* count)
  1116. {
  1117. // compute kmeans clustering for each of the resulting clusters
  1118. node->childs = pool_.allocate<KMeansNodePtr>(branching);
  1119. int start = 0;
  1120. int end = start;
  1121. for (int c=0; c<branching; ++c) {
  1122. int s = count[c];
  1123. unsigned long long variance = 0ull;
  1124. DistanceType mean_radius =0;
  1125. for (int i=0; i<indices_length; ++i) {
  1126. if (belongs_to[i]==c) {
  1127. DistanceType d = distance_(dataset_[indices[i]], ZeroIterator<ElementType>(), veclen_);
  1128. variance += static_cast<unsigned long long>( ensureSquareDistance<Distance>(d) );
  1129. mean_radius += ensureSimpleDistance<Distance>(d);
  1130. std::swap(indices[i],indices[end]);
  1131. std::swap(belongs_to[i],belongs_to[end]);
  1132. end++;
  1133. }
  1134. }
  1135. mean_radius = static_cast<DistanceType>(
  1136. 0.5f + static_cast<float>(mean_radius) / static_cast<float>(s));
  1137. variance = static_cast<unsigned long long>(
  1138. 0.5 + static_cast<double>(variance) / static_cast<double>(s));
  1139. variance -= static_cast<unsigned long long>(
  1140. ensureSquareDistance<Distance>(
  1141. distance_(centers[c], ZeroIterator<ElementType>(), veclen_)));
  1142. node->childs[c] = pool_.allocate<KMeansNode>();
  1143. std::memset(node->childs[c], 0, sizeof(KMeansNode));
  1144. node->childs[c]->radius = radiuses[c];
  1145. node->childs[c]->pivot = centers[c];
  1146. node->childs[c]->variance = static_cast<DistanceType>(variance);
  1147. node->childs[c]->mean_radius = mean_radius;
  1148. computeClustering(node->childs[c],indices+start, end-start, branching, level+1);
  1149. start=end;
  1150. }
  1151. }
  1152. template<typename DistType>
  1153. void refineAndSplitClustering(
  1154. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1155. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1156. int* belongs_to, int* count, const DistType* identifier)
  1157. {
  1158. (void)identifier;
  1159. refineClustering(indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1160. computeSubClustering(node, indices, indices_length, branching,
  1161. level, centers, radiuses, belongs_to, count);
  1162. }
  1163. /**
1164. * The methods responsible for doing the recursive hierarchical clustering on
1165. * binary vectors.
1166. * Since it is sometimes said that k-means on binary data doesn't make sense,
1167. * it's worth a brief explanation of why it actually works fairly well. As
1168. * with the Hierarchical Clustering algorithm, we seed several centers for the
1169. * current node by picking some of its points. In a first pass, each point
1170. * of the node is assigned to its closest center. Now let's have a look at
  1171. * the 5 central dimensions of the 9 following points:
  1172. *
  1173. * xxxxxx11100xxxxx (1)
  1174. * xxxxxx11010xxxxx (2)
  1175. * xxxxxx11001xxxxx (3)
  1176. * xxxxxx10110xxxxx (4)
  1177. * xxxxxx10101xxxxx (5)
  1178. * xxxxxx10011xxxxx (6)
  1179. * xxxxxx01110xxxxx (7)
  1180. * xxxxxx01101xxxxx (8)
  1181. * xxxxxx01011xxxxx (9)
  1182. * sum _____
  1183. * of 1: 66555
  1184. *
  1185. * Even if the barycenter notion doesn't apply, we can set a center
  1186. * xxxxxx11111xxxxx that will better fit the five dimensions we are focusing
  1187. * on for these points.
  1188. *
1189. * Note that convergence is no longer guaranteed. In practice, using Gonzales
1190. * as the seeding algorithm should be fine for reaching convergence ("iterations"
1191. * value can be set to -1). But with KMeans++ seeding you should definitely
1192. * set a maximum number of iterations (though make it higher than the "iterations"
1193. * default value of 11).
  1194. *
  1195. * Params:
  1196. * node = the node to cluster
  1197. * indices = indices of the points belonging to the current node
  1198. * indices_length = number of points in the current node
  1199. * branching = the branching factor to use in the clustering
1200. * level = 0 for the root node; it increases with each subdivision level
1201. * centers = cluster centers to compute
1202. * radiuses = radiuses of the clusters
1203. * belongs_to = lookup table returning, for a given index id, the center id it belongs to
1204. * count = array storing the number of indices for a given center id
1205. * identifier = dummy pointer to an instance of Distance (used to dispatch to the correct template overload)
  1206. */
  1207. void refineAndSplitClustering(
  1208. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1209. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1210. int* belongs_to, int* count, const cvflann::HammingLUT* identifier)
  1211. {
  1212. (void)identifier;
  1213. refineBitfieldClustering(
  1214. indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1215. computeAnyBitfieldSubClustering(node, indices, indices_length, branching,
  1216. level, centers, radiuses, belongs_to, count);
  1217. }
  1218. void refineAndSplitClustering(
  1219. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1220. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1221. int* belongs_to, int* count, const cvflann::Hamming<unsigned char>* identifier)
  1222. {
  1223. (void)identifier;
  1224. refineBitfieldClustering(
  1225. indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1226. computeAnyBitfieldSubClustering(node, indices, indices_length, branching,
  1227. level, centers, radiuses, belongs_to, count);
  1228. }
  1229. void refineAndSplitClustering(
  1230. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1231. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1232. int* belongs_to, int* count, const cvflann::Hamming2<unsigned char>* identifier)
  1233. {
  1234. (void)identifier;
  1235. refineBitfieldClustering(
  1236. indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1237. computeAnyBitfieldSubClustering(node, indices, indices_length, branching,
  1238. level, centers, radiuses, belongs_to, count);
  1239. }
  1240. void refineAndSplitClustering(
  1241. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1242. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1243. int* belongs_to, int* count, const cvflann::DNAmmingLUT* identifier)
  1244. {
  1245. (void)identifier;
  1246. refineDnaClustering(
  1247. indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1248. computeAnyBitfieldSubClustering(node, indices, indices_length, branching,
  1249. level, centers, radiuses, belongs_to, count);
  1250. }
  1251. void refineAndSplitClustering(
  1252. KMeansNodePtr node, int* indices, int indices_length, int branching,
  1253. int level, CentersType** centers, std::vector<DistanceType>& radiuses,
  1254. int* belongs_to, int* count, const cvflann::DNAmming2<unsigned char>* identifier)
  1255. {
  1256. (void)identifier;
  1257. refineDnaClustering(
  1258. indices, indices_length, branching, centers, radiuses, belongs_to, count);
  1259. computeAnyBitfieldSubClustering(node, indices, indices_length, branching,
  1260. level, centers, radiuses, belongs_to, count);
  1261. }
  1262. /**
1263. * The method responsible for actually doing the recursive hierarchical
  1264. * clustering
  1265. *
  1266. * Params:
  1267. * node = the node to cluster
  1268. * indices = indices of the points belonging to the current node
  1269. * branching = the branching factor to use in the clustering
  1270. *
  1271. * TODO: for 1-sized clusters don't store a cluster center (it's the same as the single cluster point)
  1272. */
    void computeClustering(KMeansNodePtr node, int* indices, int indices_length, int branching, int level)
    {
        node->size = indices_length;
        node->level = level;

        if (indices_length < branching) {
            node->indices = indices;
            std::sort(node->indices,node->indices+indices_length);
            node->childs = NULL;
            return;
        }

        cv::AutoBuffer<int> centers_idx_buf(branching);
        int* centers_idx = centers_idx_buf.data();
        int centers_length;
        (this->*chooseCenters)(branching, indices, indices_length, centers_idx, centers_length);

        if (centers_length<branching) {
            node->indices = indices;
            std::sort(node->indices,node->indices+indices_length);
            node->childs = NULL;
            return;
        }

        std::vector<DistanceType> radiuses(branching);
        cv::AutoBuffer<int> count_buf(branching);
        int* count = count_buf.data();
        for (int i=0; i<branching; ++i) {
            radiuses[i] = 0;
            count[i] = 0;
        }

        // assign points to clusters
        cv::AutoBuffer<int> belongs_to_buf(indices_length);
        int* belongs_to = belongs_to_buf.data();
        for (int i=0; i<indices_length; ++i) {
            DistanceType sq_dist = distance_(dataset_[indices[i]], dataset_[centers_idx[0]], veclen_);
            belongs_to[i] = 0;
            for (int j=1; j<branching; ++j) {
                DistanceType new_sq_dist = distance_(dataset_[indices[i]], dataset_[centers_idx[j]], veclen_);
                if (sq_dist>new_sq_dist) {
                    belongs_to[i] = j;
                    sq_dist = new_sq_dist;
                }
            }
            if (sq_dist>radiuses[belongs_to[i]]) {
                radiuses[belongs_to[i]] = sq_dist;
            }
            count[belongs_to[i]]++;
        }

        CentersType** centers = new CentersType*[branching];

        Distance* dummy = NULL;
        refineAndSplitClustering(node, indices, indices_length, branching, level,
                                 centers, radiuses, belongs_to, count, dummy);

        delete[] centers;
    }

    /**
     * Performs one descent in the hierarchical k-means tree. The branches not
     * visited are stored in a priority queue.
     *
     * Params:
     *     node = node to explore
     *     result = container for the k-nearest neighbors found
     *     vec = the query point
     *     checks = how many points in the dataset have been checked so far
     *     maxChecks = maximum number of dataset points to check
     */
    void findNN(KMeansNodePtr node, ResultSet<DistanceType>& result, const ElementType* vec, int& checks, int maxChecks,
                Heap<BranchSt>* heap)
    {
        // Ignore those clusters that are too far away
        {
            DistanceType bsq = distance_(vec, node->pivot, veclen_);
            DistanceType rsq = node->radius;
            DistanceType wsq = result.worstDist();

            if (isSquareDistance<Distance>())
            {
                DistanceType val = bsq-rsq-wsq;
                if ((val>0) && (val*val > 4*rsq*wsq))
                    return;
            }
            else
            {
                if (bsq-rsq > wsq)
                    return;
            }
        }
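        // For squared distances the test above is the square-root-free form of
        // sqrt(bsq) > sqrt(rsq) + sqrt(wsq): squaring gives bsq - rsq - wsq > 2*sqrt(rsq*wsq),
        // and squaring once more (valid only when the left side is positive)
        // gives (bsq - rsq - wsq)^2 > 4*rsq*wsq.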

        if (node->childs==NULL) {
            if ((checks>=maxChecks) && result.full()) {
                return;
            }
            checks += node->size;
            for (int i=0; i<node->size; ++i) {
                int index = node->indices[i];
                DistanceType dist = distance_(dataset_[index], vec, veclen_);
                result.addPoint(dist, index);
            }
        }
        else {
            DistanceType* domain_distances = new DistanceType[branching_];
            int closest_center = exploreNodeBranches(node, vec, domain_distances, heap);
            delete[] domain_distances;
            findNN(node->childs[closest_center],result,vec, checks, maxChecks, heap);
        }
    }

    /**
     * Helper function that computes the child of a node closest to a given query point.
     * Params:
     *     node = the node
     *     q = the query point
     *     domain_distances = array that receives the distances to each child node
     * Returns: the index of the closest child; the remaining children are pushed
     * onto the priority queue for later exploration.
     */
    int exploreNodeBranches(KMeansNodePtr node, const ElementType* q, DistanceType* domain_distances, Heap<BranchSt>* heap)
    {
        int best_index = 0;
        domain_distances[best_index] = distance_(q, node->childs[best_index]->pivot, veclen_);
        for (int i=1; i<branching_; ++i) {
            domain_distances[i] = distance_(q, node->childs[i]->pivot, veclen_);
            if (domain_distances[i]<domain_distances[best_index]) {
                best_index = i;
            }
        }
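
        // Queue every branch except the best one. Subtracting cb_index_ * variance
        // implements the "cluster border index" heuristic (see cb_index_ below):
        // with cb_index_ > 0, branches with a larger spread look closer in the
        // priority queue and tend to be explored sooner.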
//        float* best_center = node->childs[best_index]->pivot;
        for (int i=0; i<branching_; ++i) {
            if (i != best_index) {
                domain_distances[i] -= cvflann::round<DistanceType>(
                                        cb_index_*node->childs[i]->variance );

//                float dist_to_border = getDistanceToBorder(node.childs[i].pivot,best_center,q);
//                if (domain_distances[i]<dist_to_border) {
//                    domain_distances[i] = dist_to_border;
//                }
                heap->insert(BranchSt(node->childs[i],domain_distances[i]));
            }
        }

        return best_index;
    }

    /**
     * Function that performs exact nearest neighbor search by traversing the entire tree.
     */
    void findExactNN(KMeansNodePtr node, ResultSet<DistanceType>& result, const ElementType* vec)
    {
        // Ignore those clusters that are too far away
        {
            DistanceType bsq = distance_(vec, node->pivot, veclen_);
            DistanceType rsq = node->radius;
            DistanceType wsq = result.worstDist();

            if (isSquareDistance<Distance>())
            {
                DistanceType val = bsq-rsq-wsq;
                if ((val>0) && (val*val > 4*rsq*wsq))
                    return;
            }
            else
            {
                if (bsq-rsq > wsq)
                    return;
            }
        }
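        // This pruning keeps the search exact: assuming the (un-squared) distance
        // obeys the triangle inequality, every point of a pruned cluster is farther
        // from the query than the current worst result, so skipping the cluster
        // cannot change the answer.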

        if (node->childs==NULL) {
            for (int i=0; i<node->size; ++i) {
                int index = node->indices[i];
                DistanceType dist = distance_(dataset_[index], vec, veclen_);
                result.addPoint(dist, index);
            }
        }
        else {
            int* sort_indices = new int[branching_];
            getCenterOrdering(node, vec, sort_indices);
            for (int i=0; i<branching_; ++i) {
                findExactNN(node->childs[sort_indices[i]],result,vec);
            }
            delete[] sort_indices;
        }
    }

    /**
     * Helper function.
     *
     * It computes the order in which to traverse the child nodes of a particular node.
     */
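    // The children are ranked by the distance of their pivots to the query using a
    // simple insertion sort (branching_ is small), so sort_indices comes out in
    // order of increasing pivot distance.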
    void getCenterOrdering(KMeansNodePtr node, const ElementType* q, int* sort_indices)
    {
        DistanceType* domain_distances = new DistanceType[branching_];
        for (int i=0; i<branching_; ++i) {
            DistanceType dist = distance_(q, node->childs[i]->pivot, veclen_);

            int j=0;
            while (j<i && domain_distances[j]<dist)   // test j<i first so an unset slot is never read
                j++;
            for (int k=i; k>j; --k) {
                domain_distances[k] = domain_distances[k-1];
                sort_indices[k] = sort_indices[k-1];
            }
            domain_distances[j] = dist;
            sort_indices[j] = i;
        }
        delete[] domain_distances;
    }

    /**
     * Method that computes the squared distance from the query point q, which lies
     * inside the region with center c, to the border between this region and the
     * region with center p.
     */
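    // The border is the hyperplane bisecting the segment from p to c; the squared
    // distance from q to it is
    //     ( (c - p) . (q - (c + p)/2) )^2 / ||c - p||^2,
    // which is exactly what sum*sum/sum2 evaluates to below.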
    DistanceType getDistanceToBorder(DistanceType* p, DistanceType* c, DistanceType* q)
    {
        DistanceType sum = 0;
        DistanceType sum2 = 0;

        for (int i=0; i<veclen_; ++i) {
            DistanceType t = c[i]-p[i];
            sum += t*(q[i]-(c[i]+p[i])/2);
            sum2 += t*t;
        }

        return sum*sum/sum2;
    }

    /**
     * Helper function that descends in the hierarchical k-means tree by splitting those clusters that minimize
     * the overall variance of the clustering.
     * Params:
     *     root = root node
     *     clusters = array with cluster centers (return value)
     *     clusters_length = maximum number of clusters to return
     *     varianceValue = variance of the clustering (return value)
     * Returns: the number of clusters found.
     */
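    // meanVariance below tracks the size-weighted variance of the current partition;
    // each iteration splits the interior node whose children yield the smallest total
    // variance for the new partition, until clusters_length clusters are reached or
    // no further split fits.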
    int getMinVarianceClusters(KMeansNodePtr root, KMeansNodePtr* clusters, int clusters_length, DistanceType& varianceValue)
    {
        int clusterCount = 1;
        clusters[0] = root;

        DistanceType meanVariance = root->variance*root->size;

        while (clusterCount<clusters_length) {
            DistanceType minVariance = (std::numeric_limits<DistanceType>::max)();
            int splitIndex = -1;

            for (int i=0; i<clusterCount; ++i) {
                if (clusters[i]->childs != NULL) {

                    DistanceType variance = meanVariance - clusters[i]->variance*clusters[i]->size;

                    for (int j=0; j<branching_; ++j) {
                        variance += clusters[i]->childs[j]->variance*clusters[i]->childs[j]->size;
                    }
                    if (variance<minVariance) {
                        minVariance = variance;
                        splitIndex = i;
                    }
                }
            }

            if (splitIndex==-1) break;
            if ( (branching_+clusterCount-1) > clusters_length) break;

            meanVariance = minVariance;

            // split node
            KMeansNodePtr toSplit = clusters[splitIndex];
            clusters[splitIndex] = toSplit->childs[0];
            for (int i=1; i<branching_; ++i) {
                clusters[clusterCount++] = toSplit->childs[i];
            }
        }

        varianceValue = meanVariance/root->size;
        return clusterCount;
    }

private:
    /** The branching factor used in the hierarchical k-means clustering */
    int branching_;

    /** Number of kmeans trees (default is one) */
    int trees_;

    /** Maximum number of iterations to use when performing k-means clustering */
    int iterations_;

    /** Algorithm for choosing the cluster centers */
    flann_centers_init_t centers_init_;

    /**
     * Cluster border index. This is used in the tree search phase when determining
     * the closest cluster to explore next. A zero value takes into account only
     * the cluster centres; a value greater than zero also takes into account the
     * size of the cluster.
     */
    float cb_index_;

    /**
     * The dataset used by this index
     */
    const Matrix<ElementType> dataset_;

    /** Index parameters */
    IndexParams index_params_;

    /**
     * Number of features in the dataset.
     */
    size_t size_;

    /**
     * Length of each feature.
     */
    size_t veclen_;

    /**
     * The root nodes of the trees (one per tree).
     */
    KMeansNodePtr* root_;

    /**
     * Array of indices to vectors in the dataset (one array per tree).
     */
    int** indices_;

    /**
     * The distance functor.
     */
    Distance distance_;

    /**
     * Pooled memory allocator.
     */
    PooledAllocator pool_;

    /**
     * Memory occupied by the index.
     */
    int memoryCounter_;
};

}

//! @endcond

#endif //OPENCV_FLANN_KMEANS_INDEX_H_