Commit 9aecf295 authored by Marius Muja

Added proper copy constructors and assignment operators to indexes

Parent 86d21220
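
The assignment operators added throughout this commit follow the copy-and-swap idiom: the argument is taken by value (so the copy constructor does the deep copy), and its state is then swapped into *this. Below is a minimal standalone sketch of that pattern; the ExampleIndex class and its members are hypothetical and only illustrate the idiom, they are not part of FLANN.

    // Minimal sketch of the copy-and-swap idiom used by the index classes in this commit.
    #include <algorithm>  // std::swap, std::copy
    #include <cstddef>

    class ExampleIndex
    {
    public:
        explicit ExampleIndex(size_t size = 0) : size_(size), data_(size ? new float[size]() : NULL) {}

        // Copy constructor performs a deep copy of the owned buffer.
        ExampleIndex(const ExampleIndex& other) : size_(other.size_), data_(NULL)
        {
            if (other.data_) {
                data_ = new float[size_];
                std::copy(other.data_, other.data_ + size_, data_);
            }
        }

        // Assignment takes the argument by value (invoking the copy constructor above),
        // swaps the copy's state into *this, and lets the temporary release the old state.
        ExampleIndex& operator=(ExampleIndex other)
        {
            this->swap(other);
            return *this;
        }

        ~ExampleIndex() { delete[] data_; }

    private:
        void swap(ExampleIndex& other)
        {
            std::swap(size_, other.size_);
            std::swap(data_, other.data_);
        }

        size_t size_;
        float* data_;
    };

The commit also adds a virtual clone() to NNIndex so that the type-erased flann::Index wrapper can deep-copy whichever concrete index it holds, as seen in the diff below.
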
......@@ -43,8 +43,6 @@
#include "flann/util/logger.h"
namespace flann
{
......@@ -78,29 +76,49 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
typedef AutotunedIndex<Distance> IndexType;
typedef bool needs_kdtree_distance;
AutotunedIndex(const Matrix<ElementType>& inputData, const IndexParams& params = AutotunedIndexParams(), Distance d = Distance()) :
dataset_(inputData), distance_(d)
BaseClass(params, d), bestIndex_(NULL), speedup_(0), dataset_(inputData)
{
target_precision_ = get_param(params, "target_precision",0.8f);
build_weight_ = get_param(params,"build_weight", 0.01f);
memory_weight_ = get_param(params, "memory_weight", 0.0f);
sample_fraction_ = get_param(params,"sample_fraction", 0.1f);
bestIndex_ = NULL;
}
AutotunedIndex(const AutotunedIndex&);
AutotunedIndex& operator=(const AutotunedIndex&);
AutotunedIndex(const AutotunedIndex& other) : BaseClass(other),
bestParams_(other.bestParams_),
bestSearchParams_(other.bestSearchParams_),
speedup_(other.speedup_),
dataset_(other.dataset_),
target_precision_(other.target_precision_),
build_weight_(other.build_weight_),
memory_weight_(other.memory_weight_),
sample_fraction_(other.sample_fraction_)
{
bestIndex_ = other.bestIndex_->clone();
}
AutotunedIndex& operator=(AutotunedIndex other)
{
this->swap(other);
return *this;
}
virtual ~AutotunedIndex()
{
if (bestIndex_ != NULL) {
delete bestIndex_;
bestIndex_ = NULL;
}
delete bestIndex_;
}
BaseClass* clone() const
{
return new AutotunedIndex(*this);
}
......@@ -664,6 +682,21 @@ private:
return speedup;
}
void swap(AutotunedIndex& other)
{
BaseClass::swap(other);
std::swap(bestIndex_, other.bestIndex_);
std::swap(bestParams_, other.bestParams_);
std::swap(bestSearchParams_, other.bestSearchParams_);
std::swap(speedup_, other.speedup_);
std::swap(dataset_, other.dataset_);
std::swap(target_precision_, other.target_precision_);
std::swap(build_weight_, other.build_weight_);
std::swap(memory_weight_, other.memory_weight_);
std::swap(sample_fraction_, other.sample_fraction_);
}
private:
NNIndex<Distance>* bestIndex_;
......@@ -679,7 +712,7 @@ private:
/**
* The dataset used by this index
*/
const Matrix<ElementType> dataset_;
Matrix<ElementType> dataset_;
/**
* Index parameters
......@@ -689,8 +722,6 @@ private:
float memory_weight_;
float sample_fraction_;
Distance distance_;
USING_BASECLASS_SYMBOLS
};
}
......
......@@ -74,6 +74,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
typedef bool needs_kdtree_distance;
/**
......@@ -84,7 +86,7 @@ public:
* @return
*/
CompositeIndex(const IndexParams& params = CompositeIndexParams(), Distance d = Distance()) :
NNIndex<Distance>(params)
BaseClass(params, d)
{
kdtree_index_ = new KDTreeIndex<Distance>(params, d);
kmeans_index_ = new KMeansIndex<Distance>(params, d);
......@@ -92,15 +94,22 @@ public:
}
CompositeIndex(const Matrix<ElementType>& inputData, const IndexParams& params = CompositeIndexParams(),
Distance d = Distance()) : NNIndex<Distance>(params)
Distance d = Distance()) : BaseClass(params, d)
{
kdtree_index_ = new KDTreeIndex<Distance>(inputData, params, d);
kmeans_index_ = new KMeansIndex<Distance>(inputData, params, d);
}
CompositeIndex(const CompositeIndex& other) : BaseClass(other),
kmeans_index_(other.kmeans_index_), kdtree_index_(other.kdtree_index_)
{
}
CompositeIndex(const CompositeIndex&);
CompositeIndex& operator=(const CompositeIndex&);
CompositeIndex& operator=(CompositeIndex other)
{
this->swap(other);
return *this;
}
virtual ~CompositeIndex()
{
......@@ -108,6 +117,11 @@ public:
delete kmeans_index_;
}
BaseClass* clone() const
{
return new CompositeIndex(*this);
}
/**
* @return The index type
*/
......@@ -194,6 +208,13 @@ public:
kdtree_index_->findNeighbors(result, vec, searchParams);
}
protected:
void swap(CompositeIndex& other)
{
std::swap(kmeans_index_, other.kmeans_index_);
std::swap(kdtree_index_, other.kdtree_index_);
}
private:
/** The k-means index */
KMeansIndex<Distance>* kmeans_index_;
......
......@@ -85,6 +85,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
/**
* Constructor.
*
......@@ -92,7 +94,7 @@ public:
* @param d
*/
HierarchicalClusteringIndex(const IndexParams& index_params = HierarchicalClusteringIndexParams(), Distance d = Distance())
: NNIndex<Distance>(index_params), distance_(d), size_at_build_(0)
: BaseClass(index_params, d), size_at_build_(0)
{
memoryCounter_ = 0;
......@@ -101,19 +103,7 @@ public:
trees_ = get_param(index_params_,"trees",4);
leaf_size_ = get_param(index_params_,"leaf_size",100);
switch(centers_init_) {
case FLANN_CENTERS_RANDOM:
chooseCenters_ = new RandomCenterChooser<Distance>(d);
break;
case FLANN_CENTERS_GONZALES:
chooseCenters_ = new GonzalesCenterChooser<Distance>(d);
break;
case FLANN_CENTERS_KMEANSPP:
chooseCenters_ = new KMeansppCenterChooser<Distance>(d);
break;
default:
throw FLANNException("Unknown algorithm for choosing initial centers.");
}
initCenterChooser();
}
......@@ -126,7 +116,7 @@ public:
*/
HierarchicalClusteringIndex(const Matrix<ElementType>& inputData, const IndexParams& index_params = HierarchicalClusteringIndexParams(),
Distance d = Distance())
: NNIndex<Distance>(index_params), distance_(d), size_at_build_(0)
: BaseClass(index_params, d), size_at_build_(0)
{
memoryCounter_ = 0;
......@@ -135,27 +125,53 @@ public:
trees_ = get_param(index_params_,"trees",4);
leaf_size_ = get_param(index_params_,"leaf_size",100);
initCenterChooser();
chooseCenters_->setDataset(inputData);
setDataset(inputData);
}
HierarchicalClusteringIndex(const HierarchicalClusteringIndex& other) : BaseClass(other),
size_at_build_(other.size_at_build_),
memoryCounter_(other.memoryCounter_),
branching_(other.branching_),
trees_(other.trees_),
centers_init_(other.centers_init_),
leaf_size_(other.leaf_size_)
{
initCenterChooser();
tree_roots_.resize(other.tree_roots_.size());
for (size_t i=0;i<tree_roots_.size();++i) {
copyTree(tree_roots_[i], other.tree_roots_[i]);
}
}
HierarchicalClusteringIndex& operator=(HierarchicalClusteringIndex other)
{
this->swap(other);
return *this;
}
void initCenterChooser()
{
switch(centers_init_) {
case FLANN_CENTERS_RANDOM:
chooseCenters_ = new RandomCenterChooser<Distance>(d);
chooseCenters_ = new RandomCenterChooser<Distance>(distance_);
break;
case FLANN_CENTERS_GONZALES:
chooseCenters_ = new GonzalesCenterChooser<Distance>(d);
chooseCenters_ = new GonzalesCenterChooser<Distance>(distance_);
break;
case FLANN_CENTERS_KMEANSPP:
chooseCenters_ = new KMeansppCenterChooser<Distance>(d);
chooseCenters_ = new KMeansppCenterChooser<Distance>(distance_);
break;
default:
throw FLANNException("Unknown algorithm for choosing initial centers.");
}
chooseCenters_->setDataset(inputData);
setDataset(inputData);
}
HierarchicalClusteringIndex(const HierarchicalClusteringIndex&);
HierarchicalClusteringIndex& operator=(const HierarchicalClusteringIndex&);
/**
* Index destructor.
*
......@@ -163,20 +179,15 @@ public:
*/
virtual ~HierarchicalClusteringIndex()
{
clearNodeTrees();
delete chooseCenters_;
freeIndex();
}
/**
* Clears Node tree
* calling Node destructor explicitly
*/
void clearNodeTrees(){
for (int i=0; i<trees_; ++i) {
tree_roots_[i]->~Node();
}
BaseClass* clone() const
{
return new HierarchicalClusteringIndex(*this);
}
/**
* Computes the index memory usage
* Returns: memory used by the index
......@@ -216,8 +227,7 @@ public:
extendDataset(points);
if (rebuild_threshold>1 && size_at_build_*rebuild_threshold<size_) {
clearNodeTrees();
pool_.free();
freeIndex();
buildIndex();
}
else {
......@@ -364,7 +374,8 @@ private:
* destructor
* calling Node destructor explicitly
*/
~Node(){
~Node()
{
for(size_t i=0; i<childs.size(); i++){
childs[i]->~Node();
}
......@@ -414,6 +425,36 @@ private:
typedef BranchStruct<NodePtr, DistanceType> BranchSt;
/**
* Clears Node tree
* calling Node destructor explicitly
*/
void freeIndex(){
for (size_t i=0; i<tree_roots_.size(); ++i) {
tree_roots_[i]->~Node();
}
pool_.free();
}
void copyTree(NodePtr& dst, const NodePtr& src)
{
dst = new(pool_) Node();
dst->pivot_index = src->pivot_index;
dst->pivot = points_[dst->pivot_index];
if (src->childs.size()==0) {
dst->points = src->points;
}
else {
dst->childs.resize(src->childs.size());
for (size_t i=0;i<src->childs.size();++i) {
copyTree(dst->childs[i], src->childs[i]);
}
}
}
void computeLabels(int* indices, int indices_length, int* centers, int centers_length, int* labels, DistanceType& cost)
{
cost = 0;
......@@ -586,6 +627,21 @@ private:
}
}
void swap(HierarchicalClusteringIndex& other)
{
BaseClass::swap(other);
std::swap(tree_roots_, other.tree_roots_);
std::swap(size_at_build_, other.size_at_build_);
std::swap(pool_, other.pool_);
std::swap(memoryCounter_, other.memoryCounter_);
std::swap(branching_, other.branching_);
std::swap(trees_, other.trees_);
std::swap(centers_init_, other.centers_init_);
std::swap(leaf_size_, other.leaf_size_);
std::swap(chooseCenters_, other.chooseCenters_);
}
private:
/**
......@@ -593,11 +649,6 @@ private:
*/
std::vector<Node*> tree_roots_;
/**
* The distance
*/
Distance distance_;
/**
* Number of features in the dataset when the index was last built.
*/
......
......@@ -73,6 +73,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
int visited_leafs;
typedef bool needs_kdtree_distance;
......@@ -84,7 +86,7 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeCuda3dIndex(const IndexParams& params = KDTreeCuda3dIndexParams(), Distance d = Distance() ) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d), leaf_count_(0), visited_leafs(0), node_count_(0), current_node_count_(0)
{
int dim_param = get_param(params,"dim",-1);
if (dim_param>0) dim_ = dim_param;
......@@ -101,8 +103,7 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeCuda3dIndex(const Matrix<ElementType>& inputData, const IndexParams& params = KDTreeCuda3dIndexParams(),
Distance d = Distance() ) :
NNIndex<Distance>(params), distance_(d)
Distance d = Distance() ) : BaseClass(params,d), leaf_count_(0), visited_leafs(0), node_count_(0), current_node_count_(0)
{
int dim_param = get_param(params,"dim",-1);
if (dim_param>0) dim_ = dim_param;
......@@ -113,6 +114,9 @@ public:
setDataset(inputData);
}
KDTreeCuda3dIndex(const KDTreeCuda3dIndex& other);
KDTreeCuda3dIndex operator=(KDTreeCuda3dIndex other);
/**
* Standard destructor
*/
......@@ -122,6 +126,12 @@ public:
clearGpuBuffers();
}
BaseClass* clone() const
{
throw FLANNException("KDTreeCuda3dIndex cloning is not implemented");
}
/**
* Builds the index
*/
......@@ -294,8 +304,6 @@ private:
size_t dim_;
Distance distance_;
USING_BASECLASS_SYMBOLS
}; // class KDTreeCuda3dIndex
......
......@@ -74,6 +74,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
typedef bool needs_kdtree_distance;
......@@ -85,7 +87,7 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeIndex(const IndexParams& params = KDTreeIndexParams(), Distance d = Distance() ) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d), size_at_build_(0), mean_(NULL), var_(NULL)
{
trees_ = get_param(index_params_,"trees",4);
}
......@@ -99,15 +101,28 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeIndex(const Matrix<ElementType>& dataset, const IndexParams& params = KDTreeIndexParams(),
Distance d = Distance() ) : NNIndex<Distance>(params), distance_(d)
Distance d = Distance() ) : BaseClass(params,d ), size_at_build_(0), mean_(NULL), var_(NULL)
{
trees_ = get_param(index_params_,"trees",4);
setDataset(dataset);
}
KDTreeIndex(const KDTreeIndex&);
KDTreeIndex& operator=(const KDTreeIndex&);
KDTreeIndex(const KDTreeIndex& other) : BaseClass(other),
trees_(other.trees_),
size_at_build_(other.size_at_build_)
{
tree_roots_.resize(other.tree_roots_.size());
for (size_t i=0;i<tree_roots_.size();++i) {
copyTree(tree_roots_[i], other.tree_roots_[i]);
}
}
KDTreeIndex& operator=(KDTreeIndex other)
{
this->swap(other);
return *this;
}
/**
* Standard destructor
......@@ -117,6 +132,11 @@ public:
freeIndex();
}
BaseClass* clone() const
{
return new KDTreeIndex(*this);
}
using NNIndex<Distance>::buildIndex;
/**
* Builds the index
......@@ -255,6 +275,8 @@ private:
}
/*--------------------- Internal Data Structures --------------------------*/
struct Node
{
......@@ -318,6 +340,22 @@ private:
typedef BranchSt* Branch;
void copyTree(NodePtr& dst, const NodePtr& src)
{
dst = new(pool_) Node();
dst->divfeat = src->divfeat;
dst->divval = src->divval;
if (src->child1==NULL && src->child2==NULL) {
dst->point = points_[dst->divfeat];
dst->child1 = NULL;
dst->child2 = NULL;
}
else {
copyTree(dst->child1, src->child1);
copyTree(dst->child2, src->child2);
}
}
/**
* Create a tree node that subdivides the list of vecs from vind[first]
* to vind[last]. The routine is called recursively on each sublist.
......@@ -649,6 +687,15 @@ private:
}
}
}
private:
void swap(KDTreeIndex& other)
{
BaseClass::swap(other);
std::swap(trees_, other.trees_);
std::swap(size_at_build_, other.size_at_build_);
std::swap(tree_roots_, other.tree_roots_);
std::swap(pool_, other.pool_);
}
private:
......@@ -695,9 +742,6 @@ private:
*/
PooledAllocator pool_;
Distance distance_;
USING_BASECLASS_SYMBOLS
}; // class KDTreeIndex
......
......@@ -72,6 +72,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
typedef bool needs_kdtree_distance;
/**
......@@ -81,13 +83,10 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeSingleIndex(const IndexParams& params = KDTreeSingleIndexParams(), Distance d = Distance() ) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d), root_node_(NULL)
{
leaf_max_size_ = get_param(params,"leaf_max_size",10);
reorder_ = get_param(params, "reorder", true);
data_ = flann::Matrix<ElementType>();
root_node_ = NULL;
}
/**
......@@ -98,20 +97,33 @@ public:
* params = parameters passed to the kdtree algorithm
*/
KDTreeSingleIndex(const Matrix<ElementType>& inputData, const IndexParams& params = KDTreeSingleIndexParams(),
Distance d = Distance() ) :
NNIndex<Distance>(params), distance_(d)
Distance d = Distance() ) : BaseClass(params, d), root_node_(NULL)
{
leaf_max_size_ = get_param(params,"leaf_max_size",10);
reorder_ = get_param(params, "reorder", true);
data_ = flann::Matrix<ElementType>();
root_node_ = NULL;
setDataset(inputData);
}
KDTreeSingleIndex(const KDTreeSingleIndex&);
KDTreeSingleIndex& operator=(const KDTreeSingleIndex&);
KDTreeSingleIndex(const KDTreeSingleIndex& other) : BaseClass(other),
leaf_max_size_(other.leaf_max_size_),
reorder_(other.reorder_),
vind_(other.vind_),
root_bbox_(other.root_bbox_)
{
if (reorder_) {
data_ = flann::Matrix<ElementType>(new ElementType[size_*veclen_], size_, veclen_);
std::copy(other.data_[0], other.data_[0]+size_*veclen_, data_[0]);
}
copyTree(root_node_, other.root_node_);
}
KDTreeSingleIndex& operator=(KDTreeSingleIndex other)
{
this->swap(other);
return *this;
}
/**
* Standard destructor
......@@ -121,6 +133,11 @@ public:
freeIndex();
}
BaseClass* clone() const
{
return new KDTreeSingleIndex(*this);
}
using NNIndex<Distance>::buildIndex;
/**
* Builds the index
......@@ -331,6 +348,16 @@ private:
pool_.free();
}
void copyTree(NodePtr& dst, const NodePtr& src)
{
dst = new(pool_) Node();
*dst = *src;
if (src->child1!=NULL && src->child2!=NULL) {
copyTree(dst->child1, src->child1);
copyTree(dst->child2, src->child2);
}
}
void computeBoundingBox(BoundingBox& bbox)
{
bbox.resize(veclen_);
......@@ -617,6 +644,18 @@ private:
dists[idx] = dst;
}
void swap(KDTreeSingleIndex& other)
{
BaseClass::swap(other);
std::swap(leaf_max_size_, other.leaf_max_size_);
std::swap(reorder_, other.reorder_);
std::swap(vind_, other.vind_);
std::swap(data_, other.data_);
std::swap(root_node_, other.root_node_);
std::swap(root_bbox_, other.root_bbox_);
std::swap(pool_, other.pool_);
}
private:
int leaf_max_size_;
......@@ -649,8 +688,6 @@ private:
*/
PooledAllocator pool_;
Distance distance_;
USING_BASECLASS_SYMBOLS
}; // class KDTreeSingleIndex
......
......@@ -86,6 +86,8 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
typedef bool needs_vector_space_distance;
......@@ -104,10 +106,8 @@ public:
*/
KMeansIndex(const Matrix<ElementType>& inputData, const IndexParams& params = KMeansIndexParams(),
Distance d = Distance())
: NNIndex<Distance>(params), root_(NULL), distance_(d)
: BaseClass(params,d), cb_index_(0.4f), size_at_build_(0), root_(NULL), memoryCounter_(0)
{
memoryCounter_ = 0;
branching_ = get_param(params,"branching",32);
iterations_ = get_param(params,"iterations",11);
if (iterations_<0) {
......@@ -115,22 +115,8 @@ public:
}
centers_init_ = get_param(params,"centers_init",FLANN_CENTERS_RANDOM);
switch(centers_init_) {
case FLANN_CENTERS_RANDOM:
chooseCenters_ = new RandomCenterChooser<Distance>(d);
break;
case FLANN_CENTERS_GONZALES:
chooseCenters_ = new GonzalesCenterChooser<Distance>(d);
break;
case FLANN_CENTERS_KMEANSPP:
chooseCenters_ = new KMeansppCenterChooser<Distance>(d);
break;
default:
throw FLANNException("Unknown algorithm for choosing initial centers.");
}
initCenterChooser();
chooseCenters_->setDataset(inputData);
cb_index_ = 0.4f;
setDataset(inputData);
}
......@@ -144,10 +130,9 @@ public:
* params = parameters passed to the hierarchical k-means algorithm
*/
KMeansIndex(const IndexParams& params = KMeansIndexParams(), Distance d = Distance())
: NNIndex<Distance>(params), root_(NULL), distance_(d)
: BaseClass(params, d), cb_index_(0.4f), size_at_build_(0),
root_(NULL), memoryCounter_(0)
{
memoryCounter_ = 0;
branching_ = get_param(params,"branching",32);
iterations_ = get_param(params,"iterations",11);
if (iterations_<0) {
......@@ -155,27 +140,47 @@ public:
}
centers_init_ = get_param(params,"centers_init",FLANN_CENTERS_RANDOM);
initCenterChooser();
}
KMeansIndex(const KMeansIndex& other) : BaseClass(other),
branching_(other.branching_),
iterations_(other.iterations_),
centers_init_(other.centers_init_),
cb_index_(other.cb_index_),
size_at_build_(other.size_at_build_),
memoryCounter_(other.memoryCounter_)
{
initCenterChooser();
copyTree(root_, other.root_);
}
KMeansIndex& operator=(KMeansIndex other)
{
this->swap(other);
return *this;
}
void initCenterChooser()
{
switch(centers_init_) {
case FLANN_CENTERS_RANDOM:
chooseCenters_ = new RandomCenterChooser<Distance>(d);
chooseCenters_ = new RandomCenterChooser<Distance>(distance_);
break;
case FLANN_CENTERS_GONZALES:
chooseCenters_ = new GonzalesCenterChooser<Distance>(d);
chooseCenters_ = new GonzalesCenterChooser<Distance>(distance_);
break;
case FLANN_CENTERS_KMEANSPP:
chooseCenters_ = new KMeansppCenterChooser<Distance>(d);
chooseCenters_ = new KMeansppCenterChooser<Distance>(distance_);
break;
default:
throw FLANNException("Unknown algorithm for choosing initial centers.");
}
cb_index_ = 0.4f;
}
KMeansIndex(const KMeansIndex&);
KMeansIndex& operator=(const KMeansIndex&);
/**
* Index destructor.
*
......@@ -183,9 +188,16 @@ public:
*/
virtual ~KMeansIndex()
{
delete chooseCenters_;
freeIndex();
}
BaseClass* clone() const
{
return new KMeansIndex(*this);
}
void set_cb_index( float index)
{
cb_index_ = index;
......@@ -469,6 +481,27 @@ private:
pool_.free();
}
void copyTree(NodePtr& dst, const NodePtr& src)
{
dst = new(pool_) Node();
dst->pivot = new DistanceType[veclen_];
std::copy(src->pivot, src->pivot+veclen_, dst->pivot);
dst->radius = src->radius;
dst->variance = src->variance;
dst->size = src->size;
if (src->childs.size()==0) {
dst->points = src->points;
}
else {
dst->childs.resize(src->childs.size());
for (size_t i=0;i<src->childs.size();++i) {
copyTree(dst->childs[i], src->childs[i]);
}
}
}
/**
* Computes the statistics of a node (mean, radius, variance).
*
......@@ -961,6 +994,20 @@ private:
}
void swap(KMeansIndex& other)
{
std::swap(branching_, other.branching_);
std::swap(iterations_, other.iterations_);
std::swap(centers_init_, other.centers_init_);
std::swap(cb_index_, other.cb_index_);
std::swap(size_at_build_, other.size_at_build_);
std::swap(root_, other.root_);
std::swap(pool_, other.pool_);
std::swap(memoryCounter_, other.memoryCounter_);
std::swap(chooseCenters_, other.chooseCenters_);
}
private:
/** The branching factor used in the hierarchical k-means clustering */
int branching_;
......@@ -989,11 +1036,6 @@ private:
*/
NodePtr root_;
/**
* The distance
*/
Distance distance_;
/**
* Pooled memory allocator.
*/
......
......@@ -53,32 +53,44 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
LinearIndex(const IndexParams& params = LinearIndexParams(), Distance d = Distance()) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d)
{
}
LinearIndex(const Matrix<ElementType>& input_data, const IndexParams& params = LinearIndexParams(), Distance d = Distance()) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d)
{
setDataset(input_data);
}
LinearIndex(const LinearIndex& other) : BaseClass(other)
{
}
LinearIndex& operator=(LinearIndex other)
{
this->swap(other);
return *this;
}
virtual ~LinearIndex()
{
}
BaseClass* clone() const
{
return new LinearIndex(*this);
}
void addPoints(const Matrix<ElementType>& points, float rebuild_threshold = 2)
{
assert(points.cols==veclen_);
extendDataset(points);
}
LinearIndex(const LinearIndex&);
LinearIndex& operator=(const LinearIndex&);
flann_algorithm_t getType() const
{
return FLANN_INDEX_LINEAR;
......@@ -130,10 +142,7 @@ public:
}
}
private:
/** Index distance */
Distance distance_;
USING_BASECLASS_SYMBOLS
};
......
......@@ -81,12 +81,14 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
typedef NNIndex<Distance> BaseClass;
/** Constructor
* @param params parameters passed to the LSH algorithm
* @param d the distance used
*/
LshIndex(const IndexParams& params = LshIndexParams(), Distance d = Distance()) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d), size_at_build_(0)
{
table_number_ = get_param<unsigned int>(index_params_,"table_number",12);
key_size_ = get_param<unsigned int>(index_params_,"key_size",20);
......@@ -102,7 +104,7 @@ public:
* @param d the distance used
*/
LshIndex(const Matrix<ElementType>& input_data, const IndexParams& params = LshIndexParams(), Distance d = Distance()) :
NNIndex<Distance>(params), distance_(d)
BaseClass(params, d), size_at_build_(0)
{
table_number_ = get_param<unsigned int>(index_params_,"table_number",12);
key_size_ = get_param<unsigned int>(index_params_,"key_size",20);
......@@ -113,13 +115,32 @@ public:
setDataset(input_data);
}
LshIndex(const LshIndex& other) : BaseClass(other),
tables_(other.tables_),
size_at_build_(other.size_at_build_),
table_number_(other.table_number_),
key_size_(other.key_size_),
multi_probe_level_(other.multi_probe_level_),
xor_masks_(other.xor_masks_)
{
}
LshIndex& operator=(LshIndex other)
{
this->swap(other);
return *this;
}
virtual ~LshIndex()
{
}
LshIndex(const LshIndex&);
LshIndex& operator=(const LshIndex&);
BaseClass* clone() const
{
return new LshIndex(*this);
}
using NNIndex<Distance>::buildIndex;
/**
......@@ -470,6 +491,18 @@ private:
}
}
void swap(LshIndex& other)
{
BaseClass::swap(other);
std::swap(tables_, other.tables_);
std::swap(size_at_build_, other.size_at_build_);
std::swap(table_number_, other.table_number_);
std::swap(key_size_, other.key_size_);
std::swap(multi_probe_level_, other.multi_probe_level_);
std::swap(xor_masks_, other.xor_masks_);
}
/** The different hash tables */
std::vector<lsh::LshTable<ElementType> > tables_;
......@@ -486,8 +519,6 @@ private:
/** The XOR masks to apply to a key to get the neighboring buckets */
std::vector<lsh::BucketKey> xor_masks_;
Distance distance_;
USING_BASECLASS_SYMBOLS
};
}
......
......@@ -88,14 +88,36 @@ public:
typedef typename Distance::ElementType ElementType;
typedef typename Distance::ResultType DistanceType;
NNIndex() : data_ptr_(NULL)
NNIndex(Distance d) : distance_(d), last_id_(0), size_(0), veclen_(0), data_ptr_(NULL), removed_(false)
{
}
NNIndex(const IndexParams& params) : index_params_(params), data_ptr_(NULL)
NNIndex(const IndexParams& params, Distance d) : distance_(d), last_id_(0), size_(0), veclen_(0),
index_params_(params), data_ptr_(NULL), removed_(false)
{
}
NNIndex(const NNIndex& other) :
distance_(other.distance_),
last_id_(other.last_id_),
size_(other.size_),
veclen_(other.veclen_),
index_params_(other.index_params_),
removed_points_(other.removed_points_),
ids_(other.ids_),
points_(other.points_),
data_ptr_(NULL),
removed_(other.removed_)
{
if (other.data_ptr_) {
data_ptr_ = new ElementType[size_*veclen_];
std::copy(other.data_ptr_, other.data_ptr_+size_*veclen_, data_ptr_);
for (size_t i=0;i<size_;++i) {
points_[i] = data_ptr_ + i*veclen_;
}
}
}
virtual ~NNIndex()
{
if (data_ptr_) {
......@@ -103,6 +125,9 @@ public:
}
}
virtual NNIndex* clone() const = 0;
/**
* Builds the index
*/
......@@ -765,10 +790,28 @@ protected:
size_ = last_idx;
}
void swap(NNIndex& other)
{
std::swap(distance_, other.distance_);
std::swap(last_id_, other.last_id_);
std::swap(size_, other.size_);
std::swap(veclen_, other.veclen_);
std::swap(index_params_, other.index_params_);
std::swap(removed_points_, other.removed_points_);
std::swap(ids_, other.ids_);
std::swap(points_, other.points_);
std::swap(data_ptr_, other.data_ptr_);
std::swap(removed_, other.removed_);
}
protected:
/**
* The distance functor
*/
Distance distance_;
/**
* Each index point has an associated ID. IDs are assigned sequentially in
* increasing order. This indicates the ID assigned to the last point added to the
......@@ -820,16 +863,17 @@ protected:
#define USING_BASECLASS_SYMBOLS \
using NNIndex<Distance>::size_;\
using NNIndex<Distance>::veclen_;\
using NNIndex<Distance>::index_params_;\
using NNIndex<Distance>::removed_points_;\
using NNIndex<Distance>::ids_;\
using NNIndex<Distance>::removed_;\
using NNIndex<Distance>::points_;\
using NNIndex<Distance>::extendDataset;\
using NNIndex<Distance>::setDataset;\
using NNIndex<Distance>::cleanRemovedPoints;
using NNIndex<Distance>::distance_;\
using NNIndex<Distance>::size_;\
using NNIndex<Distance>::veclen_;\
using NNIndex<Distance>::index_params_;\
using NNIndex<Distance>::removed_points_;\
using NNIndex<Distance>::ids_;\
using NNIndex<Distance>::removed_;\
using NNIndex<Distance>::points_;\
using NNIndex<Distance>::extendDataset;\
using NNIndex<Distance>::setDataset;\
using NNIndex<Distance>::cleanRemovedPoints;
......
......@@ -72,11 +72,6 @@ struct SavedIndexParams : public IndexParams
template<typename Distance>
class Index
{
......@@ -101,6 +96,18 @@ public:
}
}
Index(const Index& other) : loaded_(other.loaded_), index_params_(other.index_params_)
{
nnIndex_ = other.nnIndex_->clone();
}
Index& operator=(Index other)
{
this->swap(other);
return *this;
}
virtual ~Index()
{
delete nnIndex_;
......@@ -340,9 +347,6 @@ private:
if (header.data_type != flann_datatype_value<ElementType>::value) {
throw FLANNException("Datatype of saved index is different than of the one to be created.");
}
// if ((size_t(header.rows) != dataset.rows)||(size_t(header.cols) != dataset.cols)) {
// throw FLANNException("The index saved belongs to a different dataset");
// }
IndexParams params;
params["algorithm"] = header.index_type;
......@@ -354,6 +358,13 @@ private:
return nnIndex;
}
void swap( Index& other)
{
std::swap(nnIndex_, other.nnIndex_);
std::swap(loaded_, other.loaded_);
std::swap(index_params_, other.index_params_);
}
private:
/** Pointer to actual index class */
IndexType* nnIndex_;
......
......@@ -134,7 +134,7 @@ SMALL_POLICY(unsigned long);
SMALL_POLICY(float);
SMALL_POLICY(bool);
#undef SMALL_POLICY
//#undef SMALL_POLICY
/// This function will return a different policy for each type.
template<typename T>
......
......@@ -54,7 +54,7 @@ class DynamicBitset
public:
/** @param default constructor
*/
DynamicBitset()
DynamicBitset() : size_(0)
{
}
......
......@@ -39,6 +39,15 @@
namespace flann
{
namespace anyimpl
{
SMALL_POLICY(flann_algorithm_t);
SMALL_POLICY(flann_centers_init_t);
SMALL_POLICY(flann_log_level_t);
SMALL_POLICY(flann_datatype_t);
}
typedef std::map<std::string, any> IndexParams;
......
......@@ -65,58 +65,118 @@ TEST_F(Autotuned_SIFT100K, TestSearch)
printf("Precision: %g\n", precision);
}
//
//TEST_F(Autotuned_SIFT100K, SavedTest)
//{
// float precision;
//
// // -------------------------------------
// // kd-tree index
// printf("Loading kdtree index\n");
// flann::Index<L2<float> > kdtree_index(data, flann::SavedIndexParams("kdtree.idx"));
//
// start_timer("Searching KNN...");
// kdtree_index.knnSearch(query, indices, dists, 5, flann::SearchParams(128) );
// printf("done (%g seconds)\n", stop_timer());
//
// precision = compute_precision(match, indices);
// EXPECT_GE(precision, 0.75);
// printf("Precision: %g\n", precision);
//
// // -------------------------------------
// // kmeans index
// printf("Loading kmeans index\n");
// flann::Index<L2<float> > kmeans_index(data, flann::SavedIndexParams("kmeans_tree.idx"));
//
// start_timer("Searching KNN...");
// kmeans_index.knnSearch(query, indices, dists, 5, flann::SearchParams(96) );
// printf("done (%g seconds)\n", stop_timer());
//
// precision = compute_precision(match, indices);
// EXPECT_GE(precision, 0.75);
// printf("Precision: %g\n", precision);
//
// // -------------------------------------
// // autotuned index
// printf("Loading autotuned index\n");
// flann::Index<L2<float> > autotuned_index(data, flann::SavedIndexParams("autotuned.idx"));
//
// const flann::IndexParams index_params = autotuned_index.getParameters();
// printf("The index has the following parameters:\n");
// flann::print_params(index_params);
//
// printf("Index type is: %d\n", autotuned_index.getType());
//
// start_timer("Searching KNN...");
// autotuned_index.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
// printf("done (%g seconds)\n", stop_timer());
//
// precision = compute_precision(match, indices);
// EXPECT_GE(precision, 0.75);
// printf("Precision: %g\n", precision);
//}
//
TEST_F(Autotuned_SIFT100K, SavedTest)
{
float precision;
// -------------------------------------
// autotuned index
printf("Loading autotuned index\n");
flann::Index<L2<float> > autotuned_index(data, flann::SavedIndexParams("autotuned.idx"));
const flann::IndexParams index_params = autotuned_index.getParameters();
printf("The index has the following parameters:\n");
flann::print_params(index_params);
printf("Index type is: %d\n", autotuned_index.getType());
start_timer("Searching KNN...");
autotuned_index.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
precision = compute_precision(match, indices);
EXPECT_GE(precision, 0.75);
printf("Precision: %g\n", precision);
}
TEST_F(Autotuned_SIFT100K, TestCopy)
{
float precision;
// -------------------------------------
// autotuned index
printf("Loading autotuned index\n");
flann::Index<L2<float> > index(data, flann::SavedIndexParams("autotuned.idx"));
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
precision = compute_precision(match, indices);
EXPECT_GE(precision, 0.75);
printf("Precision: %g\n", precision);
// test copy constructor
Index<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
Index<L2<float> > index3(data, flann::LinearIndexParams());
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(Autotuned_SIFT100K, TestCopy2)
{
float precision;
// -------------------------------------
// autotuned index
printf("Loading autotuned index\n");
flann::AutotunedIndex<L2<float> > index(data);
FILE* f = fopen("autotuned.idx", "r");
index.loadIndex(f);
fclose(f);
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
precision = compute_precision(match, indices);
EXPECT_GE(precision, 0.75);
printf("Precision: %g\n", precision);
// test copy constructor
AutotunedIndex<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
AutotunedIndex<L2<float> > index3(data);
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, 5, flann::SearchParams(-2) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
......
......@@ -193,6 +193,86 @@ TEST_F(HierarchicalIndex_Brief100K, TestSave)
}
TEST_F(HierarchicalIndex_Brief100K, TestCopy)
{
flann::Index<Distance> index(data, flann::HierarchicalClusteringIndexParams());
start_timer("Building hierarchical clustering index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::Index<Distance> index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::Index<Distance> index3(data, flann::HierarchicalClusteringIndexParams());
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(HierarchicalIndex_Brief100K, TestCopy2)
{
flann::HierarchicalClusteringIndex<Distance> index(data, flann::HierarchicalClusteringIndexParams());
start_timer("Building hierarchical clustering index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::HierarchicalClusteringIndex<Distance > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::HierarchicalClusteringIndex<Distance> index3(data, flann::HierarchicalClusteringIndexParams());
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
int main(int argc, char** argv)
{
testing::InitGoogleTest(&argc, argv);
......
......@@ -184,6 +184,161 @@ TEST_F(KDTreeSingle, TestSaveReorder)
printf("Precision: %g\n", precision);
}
TEST_F(KDTreeSingle, TestCopy)
{
flann::Index<L2_Simple<float> > index(data, flann::KDTreeSingleIndexParams(12, false));
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::Index<L2_Simple<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::Index<L2_Simple<float> > index3(data, flann::KDTreeSingleIndexParams(12, false));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
// repeat tests with reorder=true
flann::Index<L2_Simple<float> > index4(data, flann::KDTreeSingleIndexParams(12, true));
start_timer("Building k-d tree index...");
index4.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index4.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::Index<L2_Simple<float> > index5(index4);
start_timer("Searching KNN...");
index5.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision4 = compute_precision(match, indices);
printf("Precision: %g\n", precision4);
EXPECT_EQ(precision, precision4);
// test assignment operator
flann::Index<L2_Simple<float> > index6(data, flann::KDTreeSingleIndexParams(12, false));
index6 = index4;
start_timer("Searching KNN...");
index6.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision5 = compute_precision(match, indices);
printf("Precision: %g\n", precision5);
EXPECT_EQ(precision, precision5);
}
TEST_F(KDTreeSingle, TestCopy2)
{
flann::KDTreeSingleIndex<L2_Simple<float> > index(data, flann::KDTreeSingleIndexParams(12, false));
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::KDTreeSingleIndex<L2_Simple<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::KDTreeSingleIndex<L2_Simple<float> > index3(data, flann::KDTreeSingleIndexParams(12, false));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
// repeat tests with reorder=true
flann::KDTreeSingleIndex<L2_Simple<float> > index4(data, flann::KDTreeSingleIndexParams(12, true));
start_timer("Building k-d tree index...");
index4.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index4.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::KDTreeSingleIndex<L2_Simple<float> > index5(index4);
start_timer("Searching KNN...");
index5.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision4 = compute_precision(match, indices);
printf("Precision: %g\n", precision4);
EXPECT_EQ(precision, precision4);
// test assignment operator
flann::KDTreeSingleIndex<L2_Simple<float> > index6(data, flann::KDTreeSingleIndexParams(12, false));
index6 = index4;
start_timer("Searching KNN...");
index6.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision5 = compute_precision(match, indices);
printf("Precision: %g\n", precision5);
EXPECT_EQ(precision, precision5);
}
int main(int argc, char** argv)
{
testing::InitGoogleTest(&argc, argv);
......
......@@ -150,7 +150,86 @@ TEST_F(KDTree_SIFT10K, TestSave)
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
}
TEST_F(KDTree_SIFT10K, TestCopy)
{
Index<L2<float> > index(data, flann::KDTreeIndexParams(4));
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
Index<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
Index<L2<float> > index3(data, flann::KDTreeIndexParams(4));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(KDTree_SIFT10K, TestCopy2)
{
KDTreeIndex<L2<float> > index(data, flann::KDTreeIndexParams(4));
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
KDTreeIndex<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
KDTreeIndex<L2<float> > index3(data, flann::KDTreeIndexParams(4));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
......
......@@ -154,6 +154,85 @@ TEST_F(KMeans_SIFT10K, TestSave)
}
TEST_F(KMeans_SIFT10K, TestCopy)
{
Index<L2<float> > index(data, flann::KMeansIndexParams(7, 3, FLANN_CENTERS_RANDOM, 0.4));
start_timer("Building kmeans index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
Index<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
Index<L2<float> > index3(data, flann::KMeansIndexParams(7, 3, FLANN_CENTERS_RANDOM, 0.4));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(KMeans_SIFT10K, TestCopy2)
{
KMeansIndex<L2<float> > index(data, flann::KMeansIndexParams(7, 3, FLANN_CENTERS_RANDOM, 0.4));
start_timer("Building kmeans index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
KMeansIndex<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
KMeansIndex<L2<float> > index3(data, flann::KMeansIndexParams(7, 3, FLANN_CENTERS_RANDOM, 0.4));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(128) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
/**
* Test fixture for SIFT 100K dataset
*/
......
......@@ -109,6 +109,84 @@ TEST_F(Linear_SIFT10K, TestSave)
EXPECT_EQ(precision, precision2);
}
TEST_F(Linear_SIFT10K, TestCopy)
{
Index<L2<float> > index(data, flann::LinearIndexParams());
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_EQ(precision, 1);
// test copy constructor
Index<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
Index<L2<float> > index3(data, flann::LinearIndexParams());
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(Linear_SIFT10K, TestCopy2)
{
LinearIndex<L2<float> > index(data, flann::LinearIndexParams());
start_timer("Building k-d tree index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_EQ(precision, 1);
// test copy constructor
LinearIndex<L2<float> > index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
LinearIndex<L2<float> > index3(data, flann::LinearIndexParams());
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, knn, flann::SearchParams(256) );
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
/**
......
......@@ -195,6 +195,85 @@ TEST_F(LshIndex_Brief100K, SavedTest)
}
TEST_F(LshIndex_Brief100K, TestCopy)
{
flann::Index<Distance> index(data, flann::LshIndexParams(12, 20, 2));
start_timer("Building LSH index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(-1));
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::Index<Distance> index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(-1));
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::Index<Distance> index3(data, flann::LshIndexParams(12, 20, 2));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
TEST_F(LshIndex_Brief100K, TestCopy2)
{
flann::LshIndex<Distance> index(data, flann::LshIndexParams(12, 20, 2));
start_timer("Building LSH index...");
index.buildIndex();
printf("done (%g seconds)\n", stop_timer());
start_timer("Searching KNN...");
index.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(-1));
printf("done (%g seconds)\n", stop_timer());
float precision = compute_precision(match, indices);
printf("Precision: %g\n", precision);
EXPECT_GE(precision, 0.75);
// test copy constructor
flann::LshIndex<Distance> index2(index);
start_timer("Searching KNN...");
index2.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(-1));
printf("done (%g seconds)\n", stop_timer());
float precision2 = compute_precision(match, indices);
printf("Precision: %g\n", precision2);
EXPECT_EQ(precision, precision2);
// test assignment operator
flann::LshIndex<Distance> index3(data, flann::LshIndexParams(12, 20, 2));
index3 = index;
start_timer("Searching KNN...");
index3.knnSearch(query, indices, dists, k_nn_, flann::SearchParams(2000));
printf("done (%g seconds)\n", stop_timer());
float precision3 = compute_precision(match, indices);
printf("Precision: %g\n", precision3);
EXPECT_EQ(precision, precision3);
}
int main(int argc, char** argv)
{
testing::InitGoogleTest(&argc, argv);
......