/** 
* @file testNullSpaceNovelty.cpp
* @brief test function for class KCNullSpaceNovelty
* @author Paul Bodesheim
* @date 28-11-2012 (dd-mm-yyyy)
*/

#include <iostream>
#include <vector>
#include <cmath>
#include <algorithm>

#ifdef NICE_USELIB_MATIO

#include "core/basics/Config.h"
#include "core/basics/Timer.h"
#include "core/vector/Algorithms.h"
#include "core/vector/SparseVectorT.h"

#include "vislearning/classifier/kernelclassifier/KCNullSpaceNovelty.h"
#include "vislearning/math/kernels/KernelData.h"
#include "vislearning/cbaselib/ClassificationResults.h"
#include "vislearning/baselib/ProgressBar.h"

#include "core/matlabAccess/MatFileIO.h"
#include "vislearning/matlabAccessHighLevel/ImageNetData.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;

// --------------- THE KERNEL FUNCTION ( exponential kernel with Euclidean distance ) ----------------------
double measureDistance ( const NICE::SparseVector & a, const NICE::SparseVector & b, const double & sigma = 2.0)
{
  double inner_sum(0.0);
  double d;
  
  //new version, where we needed on average 0.001707 s for each test sample
  NICE::SparseVector::const_iterator aIt = a.begin();
  NICE::SparseVector::const_iterator bIt = b.begin();
  
  //compute the squared Euclidean distance between both feature vectors (given as SparseVectors)
  while ( (aIt != a.end()) && (bIt != b.end()) )
  {
    if (aIt->first == bIt->first)
    {
      d = ( aIt->second - bIt->second );
      inner_sum += d * d;
      aIt++;
      bIt++;
    }
    else if ( aIt->first < bIt->first)
    {
      inner_sum += aIt->second * aIt->second;
      aIt++;
    }
    else
    {
      inner_sum += bIt->second * bIt->second;
      bIt++;
    }
  }
  
  //compute remaining values, if b reached the end but not a
  while (aIt != a.end())
  {
    inner_sum += aIt->second * aIt->second;
    aIt++;
  }
  
  //compute remaining values, if a reached the end but not b
  while (bIt != b.end())
  {
    inner_sum += bIt->second * bIt->second;
    bIt++;
  }
  
  //normalization of the exponent
  inner_sum /= (2.0*sigma*sigma);
  
  //finally, compute the RBF-kernel score (RBF = radial basis function)
  return exp(-inner_sum);
}

// --------------- THE KERNEL FUNCTION ( HIK ) ----------------------
double minimumDistance ( const NICE::SparseVector & a, const NICE::SparseVector & b )
{
  double inner_sum(0.0);
  
  NICE::SparseVector::const_iterator aIt = a.begin();
  NICE::SparseVector::const_iterator bIt = b.begin();
  
  //compute the histogram intersection kernel, i.e. the sum of element-wise minima
  //of both feature vectors (given as SparseVectors)
  while ( (aIt != a.end()) && (bIt != b.end()) )
  {
    if (aIt->first == bIt->first)
    {
      inner_sum += std::min( aIt->second , bIt->second );
      aIt++;
      bIt++;
    }
    else if ( aIt->first < bIt->first)
    {
      aIt++;
    }
    else
    {
      bIt++;
    }
  }
  
  return inner_sum;
}

/** 
    test the basic functionality of the null space novelty detection method (KNFST, class KCNullSpaceNovelty)
*/
int main (int argc, char **argv)
{
  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
  
  Config conf ( argc, argv );
  string resultsfile = conf.gS("main", "results", "results.txt" );
  int nrOfExamplesPerClass = conf.gI("main", "nrOfExamplesPerClass", 100);
  nrOfExamplesPerClass = std::min(nrOfExamplesPerClass, 100); // we do not have more than 100 examples per class
  
  // -------- read ImageNet data --------------
  std::vector<NICE::SparseVector> trainingData; // sparse features as provided by ImageNetData
  NICE::Vector y;
  
  std::cerr << "Reading ImageNet data ..." << std::endl;
  bool imageNetLocal = conf.gB("main", "imageNetLocal" , false);
  string imageNetPath;
  if (imageNetLocal)
    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  else
    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";
  
  ImageNetData imageNetTrain ( imageNetPath + "demo/" );
  imageNetTrain.preloadData( "train", "training" );
  trainingData = imageNetTrain.getPreloadedData();
  y = imageNetTrain.getPreloadedLabels();
  
  std::cerr << "Reading of training data finished" << std::endl;
  std::cerr << "trainingData.size(): " << trainingData.size() << std::endl;
  std::cerr << "y.size(): " << y.size() << std::endl;
  
  std::cerr << "Reading ImageNet test data files (takes some seconds)..." << std::endl;
  ImageNetData imageNetTest ( imageNetPath + "demo/" );
  imageNetTest.preloadData ( "val", "testing" );
  imageNetTest.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );
  
  // -------- select training set -------------
  // use the first five ImageNet classes as known classes
  NICE::Vector knownClassLabels(5,0.0);
  for (int k=1; k<6; k++)
    knownClassLabels(k-1) = k;
  
  std::vector<NICE::SparseVector> currentTrainingData;
  currentTrainingData.clear();
  NICE::Vector currentTrainingLabels(nrOfExamplesPerClass*knownClassLabels.size(),0);
  
  int k(0);
  for (size_t i = 0; i < y.size(); i++)
  {
    for (size_t j = 0; j < knownClassLabels.size(); j++)
    {
      // NOTE: the original body of this loop is not available here; the straightforward
      // reconstruction collects all examples whose label belongs to the known classes,
      // up to the size of currentTrainingLabels
      if ( (y[i] == knownClassLabels(j)) && (k < (int)currentTrainingLabels.size()) )
      {
        currentTrainingData.push_back( trainingData[i] );
        currentTrainingLabels(k) = y[i];
        k++;
        break;
      }
    }
  }
  
  // -------- train the null space novelty detection (KNFST) classifier --------
  // NOTE: the original kernel matrix computation and training call are not available
  // here either; the following sketch assumes the usual KernelClassifier interface
  // (a KernelData object wrapping the kernel matrix and a teach() method) and uses,
  // for example, the HIK kernel function defined above
  NICE::Matrix kernelMatrix ( currentTrainingData.size(), currentTrainingData.size(), 0.0 );
  for (size_t i = 0; i < currentTrainingData.size(); i++)
  {
    for (size_t j = i; j < currentTrainingData.size(); j++)
    {
      kernelMatrix(i,j) = minimumDistance( currentTrainingData[i], currentTrainingData[j] );
      kernelMatrix(j,i) = kernelMatrix(i,j);
    }
  }
  
  KernelData kernelData ( &conf, kernelMatrix );
  KCNullSpaceNovelty knfst ( &conf );
  knfst.teach ( &kernelData, currentTrainingLabels );
  
  // -------- output some statistics of the trained classifier --------
  // the statistic is assumed to be a std::map<int,int> (class label -> number of training examples)
  std::map<int,int> trainingSetStatistic = knfst.getTrainingSetStatistic();
  std::map<int,int>::iterator itt;
  for (itt = trainingSetStatistic.begin(); itt != trainingSetStatistic.end(); itt++)
    std::cerr << (*itt).first << " " << (*itt).second << std::endl;
  
  std::cerr << "one-class setting?: " << knfst.isOneClass() << std::endl;
  std::cerr << "null space dimension: "<< knfst.getNullSpaceDimension() << std::endl;
  
  std::cerr << "target points: " << std::endl;
  // NOTE: the remainder of this loop is cut off in this copy; presumably the target
  // points of the trained model are printed, via an accessor assumed to be named getTargetPoints()
  for (size_t k=0; k < knfst.getTargetPoints().size(); k++)
    std::cerr << knfst.getTargetPoints()[k] << std::endl;
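  
  // --------------------------------------------------------------------------------
  // Illustrative sketch (not part of the original test program): a quick sanity check
  // of the two kernel functions defined above on two tiny hand-made sparse vectors.
  // The element access via operator[] assumes that NICE::SparseVector exposes the
  // std::map-style interface that the iterator-based kernel code above already relies
  // on. measureDistance(a,a) has to yield exp(0) = 1.0, and minimumDistance(a,b) sums
  // the element-wise minima over the shared dimensions.
  // --------------------------------------------------------------------------------
  NICE::SparseVector featA;
  NICE::SparseVector featB;
  featA[0] = 0.5; featA[2] = 0.3; featA[7] = 0.2; // sparse, histogram-like features
  featB[0] = 0.4; featB[3] = 0.4; featB[7] = 0.2;
  
  std::cerr << "RBF kernel (A,A): " << measureDistance(featA, featA) << std::endl; // 1.0
  std::cerr << "RBF kernel (A,B): " << measureDistance(featA, featB) << std::endl;
  std::cerr << "HIK kernel (A,B): " << minimumDistance(featA, featB) << std::endl; // min(0.5,0.4) + min(0.2,0.2) = 0.6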