kdtree_index.h

/***********************************************************************
 * Software License Agreement (BSD License)
 *
 * Copyright 2008-2009  Marius Muja (mariusm@cs.ubc.ca). All rights reserved.
 * Copyright 2008-2009  David G. Lowe (lowe@cs.ubc.ca). All rights reserved.
 *
 * THE BSD LICENSE
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *************************************************************************/

#ifndef OPENCV_FLANN_KDTREE_INDEX_H_
#define OPENCV_FLANN_KDTREE_INDEX_H_

#include <algorithm>
#include <map>
#include <cassert>
#include <cstring>

#include "general.h"
#include "nn_index.h"
#include "dynamic_bitset.h"
#include "matrix.h"
#include "result_set.h"
#include "heap.h"
#include "allocator.h"
#include "random.h"
#include "saving.h"

namespace cvflann
{
struct KDTreeIndexParams : public IndexParams
{
    KDTreeIndexParams(int trees = 4)
    {
        (*this)["algorithm"] = FLANN_INDEX_KDTREE;
        (*this)["trees"] = trees;
    }
};
/**
 * Randomized kd-tree index
 *
 * Contains the k-d trees and other information for indexing a set of points
 * for nearest-neighbor matching.
 */
template <typename Distance>
class KDTreeIndex : public NNIndex<Distance>
{
public:
    typedef typename Distance::ElementType ElementType;
    typedef typename Distance::ResultType DistanceType;

    /**
     * KDTree constructor
     *
     * Params:
     *     inputData = dataset with the input features
     *     params = parameters passed to the kdtree algorithm
     */
    KDTreeIndex(const Matrix<ElementType>& inputData, const IndexParams& params = KDTreeIndexParams(),
                Distance d = Distance() ) :
        dataset_(inputData), index_params_(params), distance_(d)
    {
        size_ = dataset_.rows;
        veclen_ = dataset_.cols;

        trees_ = get_param(index_params_,"trees",4);
        tree_roots_ = new NodePtr[trees_];

        // Create a permutable array of indices to the input vectors.
        vind_.resize(size_);
        for (size_t i = 0; i < size_; ++i) {
            vind_[i] = int(i);
        }

        mean_ = new DistanceType[veclen_];
        var_ = new DistanceType[veclen_];
    }

    KDTreeIndex(const KDTreeIndex&);
    KDTreeIndex& operator=(const KDTreeIndex&);

    /**
     * Standard destructor
     */
    ~KDTreeIndex()
    {
        if (tree_roots_!=NULL) {
            delete[] tree_roots_;
        }
        delete[] mean_;
        delete[] var_;
    }
    /**
     * Builds the index
     */
    void buildIndex()
    {
        /* Construct the randomized trees. */
        for (int i = 0; i < trees_; i++) {
            /* Randomize the order of vectors to allow for unbiased sampling. */
            std::random_shuffle(vind_.begin(), vind_.end());
            tree_roots_[i] = divideTree(&vind_[0], int(size_) );
        }
    }

    flann_algorithm_t getType() const
    {
        return FLANN_INDEX_KDTREE;
    }

    void saveIndex(FILE* stream)
    {
        save_value(stream, trees_);
        for (int i=0; i<trees_; ++i) {
            save_tree(stream, tree_roots_[i]);
        }
    }

    void loadIndex(FILE* stream)
    {
        load_value(stream, trees_);
        if (tree_roots_!=NULL) {
            delete[] tree_roots_;
        }
        tree_roots_ = new NodePtr[trees_];
        for (int i=0; i<trees_; ++i) {
            load_tree(stream, tree_roots_[i]);
        }

        index_params_["algorithm"] = getType();
        index_params_["trees"] = trees_;   // store the tree count, not the root-pointer array
    }
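    /* A minimal save/load sketch (illustrative only; the file name, the `data`
       matrix and cvflann::L2<float> from dist.h are assumptions, not part of this
       header). Only the tree structure is serialized -- the dataset itself is not --
       so the loading side must construct the index over the same data before
       calling loadIndex():

           KDTreeIndex<L2<float> > index(data, KDTreeIndexParams(4));
           index.buildIndex();
           FILE* out = fopen("kdtree.idx", "wb");
           index.saveIndex(out);
           fclose(out);

           KDTreeIndex<L2<float> > restored(data, KDTreeIndexParams(4));
           FILE* in = fopen("kdtree.idx", "rb");
           restored.loadIndex(in);
           fclose(in);
     */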
    /**
     * Returns size of index.
     */
    size_t size() const
    {
        return size_;
    }

    /**
     * Returns the length of an index feature.
     */
    size_t veclen() const
    {
        return veclen_;
    }

    /**
     * Computes the index memory usage
     * Returns: memory used by the index
     */
    int usedMemory() const
    {
        return int(pool_.usedMemory+pool_.wastedMemory+dataset_.rows*sizeof(int));  // pool memory and vind array memory
    }

    /**
     * Find set of nearest neighbors to vec. Their indices are stored inside
     * the result object.
     *
     * Params:
     *     result = the result object in which the indices of the nearest-neighbors are stored
     *     vec = the vector for which to search the nearest neighbors
     *     searchParams = search parameters; "checks" bounds the number of leaves examined
     *                    (best-bin-first restarts) and "eps" the allowed approximation error
     */
    void findNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, const SearchParams& searchParams)
    {
        int maxChecks = get_param(searchParams,"checks", 32);
        float epsError = 1+get_param(searchParams,"eps",0.0f);

        if (maxChecks==FLANN_CHECKS_UNLIMITED) {
            getExactNeighbors(result, vec, epsError);
        }
        else {
            getNeighbors(result, vec, maxChecks, epsError);
        }
    }

    IndexParams getParameters() const
    {
        return index_params_;
    }
private:

    /*--------------------- Internal Data Structures --------------------------*/
    struct Node
    {
        /**
         * Dimension used for subdivision.
         */
        int divfeat;
        /**
         * The value used for subdivision.
         */
        DistanceType divval;
        /**
         * The child nodes.
         */
        Node* child1, * child2;
    };
    typedef Node* NodePtr;
    typedef BranchStruct<NodePtr, DistanceType> BranchSt;
    typedef BranchSt* Branch;

    void save_tree(FILE* stream, NodePtr tree)
    {
        save_value(stream, *tree);
        if (tree->child1!=NULL) {
            save_tree(stream, tree->child1);
        }
        if (tree->child2!=NULL) {
            save_tree(stream, tree->child2);
        }
    }

    void load_tree(FILE* stream, NodePtr& tree)
    {
        tree = pool_.allocate<Node>();
        load_value(stream, *tree);
        if (tree->child1!=NULL) {
            load_tree(stream, tree->child1);
        }
        if (tree->child2!=NULL) {
            load_tree(stream, tree->child2);
        }
    }

    /**
     * Create a tree node that subdivides the list of vectors given by the
     * indices ind[0..count-1]. The routine is called recursively on each sublist.
     *
     * Params: ind = pointer to the first index of the (sub)list of vectors
     *         count = number of indices in the list
     */
    NodePtr divideTree(int* ind, int count)
    {
        NodePtr node = pool_.allocate<Node>(); // allocate memory

        /* If too few exemplars remain, then make this a leaf node. */
        if ( count == 1) {
            node->child1 = node->child2 = NULL;    /* Mark as leaf node. */
            node->divfeat = *ind;    /* Store index of this vec. */
        }
        else {
            int idx;
            int cutfeat;
            DistanceType cutval;
            meanSplit(ind, count, idx, cutfeat, cutval);

            node->divfeat = cutfeat;
            node->divval = cutval;
            node->child1 = divideTree(ind, idx);
            node->child2 = divideTree(ind+idx, count-idx);
        }

        return node;
    }
    /**
     * Choose which feature to use in order to subdivide this set of vectors.
     * Make a random choice among those with the highest variance, and use
     * the mean of the chosen feature as the split (threshold) value.
     */
    void meanSplit(int* ind, int count, int& index, int& cutfeat, DistanceType& cutval)
    {
        memset(mean_,0,veclen_*sizeof(DistanceType));
        memset(var_,0,veclen_*sizeof(DistanceType));

        /* Compute mean values.  Only the first SAMPLE_MEAN values need to be
           sampled to get a good estimate.
         */
        int cnt = std::min((int)SAMPLE_MEAN+1, count);
        for (int j = 0; j < cnt; ++j) {
            ElementType* v = dataset_[ind[j]];
            for (size_t k=0; k<veclen_; ++k) {
                mean_[k] += v[k];
            }
        }
        for (size_t k=0; k<veclen_; ++k) {
            mean_[k] /= cnt;
        }

        /* Compute variances (no need to divide by count). */
        for (int j = 0; j < cnt; ++j) {
            ElementType* v = dataset_[ind[j]];
            for (size_t k=0; k<veclen_; ++k) {
                DistanceType dist = v[k] - mean_[k];
                var_[k] += dist * dist;
            }
        }
        /* Select one of the highest variance indices at random. */
        cutfeat = selectDivision(var_);
        cutval = mean_[cutfeat];

        int lim1, lim2;
        planeSplit(ind, count, cutfeat, cutval, lim1, lim2);

        if (lim1>count/2) index = lim1;
        else if (lim2<count/2) index = lim2;
        else index = count/2;

        /* If either list is empty, it means that all remaining features
         * are identical. Split in the middle to maintain a balanced tree.
         */
        if ((lim1==count)||(lim2==0)) index = count/2;
    }

    /**
     * Select the top RAND_DIM largest values from v and return the index of
     * one of these selected at random.
     */
    int selectDivision(DistanceType* v)
    {
        int num = 0;
        size_t topind[RAND_DIM];

        /* Create a list of the indices of the top RAND_DIM values. */
        for (size_t i = 0; i < veclen_; ++i) {
            if ((num < RAND_DIM)||(v[i] > v[topind[num-1]])) {
                /* Put this element at end of topind. */
                if (num < RAND_DIM) {
                    topind[num++] = i;            /* Add to list. */
                }
                else {
                    topind[num-1] = i;            /* Replace last element. */
                }
                /* Bubble end value down to right location by repeated swapping. */
                int j = num - 1;
                while (j > 0  &&  v[topind[j]] > v[topind[j-1]]) {
                    std::swap(topind[j], topind[j-1]);
                    --j;
                }
            }
        }
        /* Select a random integer in range [0,num-1], and return that index. */
        int rnd = rand_int(num);
        return (int)topind[rnd];
    }
    /**
     *  Subdivide the list of points by a plane perpendicular to the axis
     *  corresponding to the 'cutfeat' dimension at the 'cutval' position.
     *
     *  On return (a concrete example follows the function body below):
     *  dataset[ind[0..lim1-1]][cutfeat] < cutval
     *  dataset[ind[lim1..lim2-1]][cutfeat] == cutval
     *  dataset[ind[lim2..count-1]][cutfeat] > cutval
     */
    void planeSplit(int* ind, int count, int cutfeat, DistanceType cutval, int& lim1, int& lim2)
    {
        /* Move vector indices for left subtree to front of list. */
        int left = 0;
        int right = count-1;
        for (;; ) {
            while (left<=right && dataset_[ind[left]][cutfeat]<cutval) ++left;
            while (left<=right && dataset_[ind[right]][cutfeat]>=cutval) --right;
            if (left>right) break;
            std::swap(ind[left], ind[right]); ++left; --right;
        }
        lim1 = left;
        right = count-1;
        for (;; ) {
            while (left<=right && dataset_[ind[left]][cutfeat]<=cutval) ++left;
            while (left<=right && dataset_[ind[right]][cutfeat]>cutval) --right;
            if (left>right) break;
            std::swap(ind[left], ind[right]); ++left; --right;
        }
        lim2 = left;
    }
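    /* Example of the invariant above: with cutval = 5 and feature values
     * {7, 2, 5, 9, 5, 1} along 'cutfeat', the two partitioning passes reorder
     * the indices so the values appear as e.g. {1, 2, 5, 5, 9, 7} -- everything
     * below cutval first, the values equal to cutval next, the rest last --
     * giving lim1 = 2 and lim2 = 4.
     */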
    /**
     * Performs an exact nearest neighbor search. The exact search performs a full
     * traversal of the tree.
     */
    void getExactNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, float epsError)
    {
        // checkID -= 1;  /* Set a different unique ID for each search. */

        if (trees_ > 1) {
            fprintf(stderr,"It doesn't make any sense to use more than one tree for exact search");
        }
        if (trees_>0) {
            searchLevelExact(result, vec, tree_roots_[0], 0.0, epsError);
        }
        assert(result.full());
    }

    /**
     * Performs the approximate nearest-neighbor search. The search is approximate
     * because the tree traversal is abandoned after a given number of descents
     * into the tree.
     */
    void getNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, int maxCheck, float epsError)
    {
        int i;
        BranchSt branch;

        int checkCount = 0;
        Heap<BranchSt>* heap = new Heap<BranchSt>((int)size_);
        DynamicBitset checked(size_);

        /* Search once through each tree down to a leaf. */
        for (i = 0; i < trees_; ++i) {
            searchLevel(result, vec, tree_roots_[i], 0, checkCount, maxCheck, epsError, heap, checked);
        }

        /* Keep searching other branches from heap until finished. */
        while ( heap->popMin(branch) && (checkCount < maxCheck || !result.full() )) {
            searchLevel(result, vec, branch.node, branch.mindist, checkCount, maxCheck, epsError, heap, checked);
        }

        delete heap;

        assert(result.full());
    }
    /**
     *  Search starting from a given node of the tree.  Based on any mismatches at
     *  higher levels, all exemplars below this level must have a distance of
     *  at least "mindist".
     */
    void searchLevel(ResultSet<DistanceType>& result_set, const ElementType* vec, NodePtr node, DistanceType mindist, int& checkCount, int maxCheck,
                     float epsError, Heap<BranchSt>* heap, DynamicBitset& checked)
    {
        if (result_set.worstDist()<mindist) {
            // printf("Ignoring branch, too far\n");
            return;
        }

        /* If this is a leaf node, then do check and return. */
        if ((node->child1 == NULL)&&(node->child2 == NULL)) {
            /*  Do not check the same node more than once when searching multiple
                trees. Once a vector is checked, it is marked in the 'checked' bitset.
             */
            int index = node->divfeat;
            if ( checked.test(index) || ((checkCount>=maxCheck)&& result_set.full()) ) return;
            checked.set(index);
            checkCount++;

            DistanceType dist = distance_(dataset_[index], vec, veclen_);
            result_set.addPoint(dist,index);

            return;
        }

        /* Which child branch should be taken first? */
        ElementType val = vec[node->divfeat];
        DistanceType diff = val - node->divval;
        NodePtr bestChild = (diff < 0) ? node->child1 : node->child2;
        NodePtr otherChild = (diff < 0) ? node->child2 : node->child1;

        /* Create a branch record for the branch not taken.  Add distance
            of this feature boundary (we don't attempt to correct for any
            use of this feature in a parent node, which is unlikely to
            happen and would have only a small effect).  Don't bother
            adding more branches to heap after halfway point, as cost of
            adding exceeds their value.
         */

        DistanceType new_distsq = mindist + distance_.accum_dist(val, node->divval, node->divfeat);
        // if (2 * checkCount < maxCheck  ||  !result.full()) {
        if ((new_distsq*epsError < result_set.worstDist())|| !result_set.full()) {
            heap->insert( BranchSt(otherChild, new_distsq) );
        }

        /* Call recursively to search next level down. */
        searchLevel(result_set, vec, bestChild, mindist, checkCount, maxCheck, epsError, heap, checked);
    }
    /**
     * Performs an exact search in the tree starting from a node.
     */
    void searchLevelExact(ResultSet<DistanceType>& result_set, const ElementType* vec, const NodePtr node, DistanceType mindist, const float epsError)
    {
        /* If this is a leaf node, then do check and return. */
        if ((node->child1 == NULL)&&(node->child2 == NULL)) {
            int index = node->divfeat;
            DistanceType dist = distance_(dataset_[index], vec, veclen_);
            result_set.addPoint(dist,index);
            return;
        }

        /* Which child branch should be taken first? */
        ElementType val = vec[node->divfeat];
        DistanceType diff = val - node->divval;
        NodePtr bestChild = (diff < 0) ? node->child1 : node->child2;
        NodePtr otherChild = (diff < 0) ? node->child2 : node->child1;

        /* Compute the distance added by this feature boundary for the branch not
            taken (we don't attempt to correct for any use of this feature in a
            parent node, which is unlikely to happen and would have only a small
            effect).
         */
        DistanceType new_distsq = mindist + distance_.accum_dist(val, node->divval, node->divfeat);

        /* Call recursively to search next level down. */
        searchLevelExact(result_set, vec, bestChild, mindist, epsError);

        if (new_distsq*epsError<=result_set.worstDist()) {
            searchLevelExact(result_set, vec, otherChild, new_distsq, epsError);
        }
    }
private:

    enum
    {
        /**
         * To improve efficiency, only SAMPLE_MEAN random values are used to
         * compute the mean and variance at each level when building a tree.
         * A value of 100 seems to perform as well as using all values.
         */
        SAMPLE_MEAN = 100,
        /**
         * Top random dimensions to consider
         *
         * When creating random trees, the dimension on which to subdivide is
         * selected at random from among the top RAND_DIM dimensions with the
         * highest variance.  A value of 5 works well.
         */
        RAND_DIM=5
    };

    /**
     * Number of randomized trees that are used
     */
    int trees_;

    /**
     *  Array of indices to vectors in the dataset.
     */
    std::vector<int> vind_;

    /**
     * The dataset used by this index
     */
    const Matrix<ElementType> dataset_;

    IndexParams index_params_;

    size_t size_;
    size_t veclen_;

    DistanceType* mean_;
    DistanceType* var_;

    /**
     * Array of k-d trees used to find neighbours.
     */
    NodePtr* tree_roots_;

    /**
     * Pooled memory allocator.
     *
     * Using a pooled memory allocator is more efficient
     * than allocating memory directly when there is a large
     * number of small memory allocations.
     */
    PooledAllocator pool_;

    Distance distance_;

};   // class KDTreeIndex
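/* A minimal usage sketch. It assumes cvflann::L2<float> (dist.h) and KNNResultSet
   (result_set.h) are available, and that `features` and `query` point to
   caller-owned float data; these names are placeholders, not part of this header:

       Matrix<float> data(features, rows, cols);               // wraps existing data, no copy
       KDTreeIndex<L2<float> > index(data, KDTreeIndexParams(4));
       index.buildIndex();                                     // build 4 randomized trees

       const int knn = 3;
       std::vector<int> indices(knn);
       std::vector<float> dists(knn);
       KNNResultSet<float> result(knn);
       result.init(&indices[0], &dists[0]);
       index.findNeighbors(result, query, SearchParams(32));   // examine at most ~32 leaf points
*/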
}   // namespace cvflann

#endif //OPENCV_FLANN_KDTREE_INDEX_H_