|
@@ -1,13 +1,16 @@
|
|
|
|
|
|
#include "FeatureLearningPrototypes.h"
|
|
|
|
|
|
+//STL
|
|
|
#include <iostream>
|
|
|
|
|
|
+//core
|
|
|
#include <core/image/FilterT.h>
|
|
|
#include <core/image/CircleT.h>
|
|
|
#include <core/image/Convert.h>
|
|
|
#include <core/vector/VectorT.h>
|
|
|
|
|
|
+//vislearning
|
|
|
#include <vislearning/features/localfeatures/LFonHSG.h>
|
|
|
#include <vislearning/features/localfeatures/LFColorSande.h>
|
|
|
#include <vislearning/features/localfeatures/LFColorWeijer.h>
|
|
@@ -48,6 +51,79 @@ void FeatureLearningPrototypes::setClusterAlgo( const std::string & _clusterAlgo
|
|
|
}
|
|
|
}
|
|
|
|
|
|
+void FeatureLearningPrototypes::setFeatureExtractor( const bool & _setForTraining )
|
|
|
+{
|
|
|
+ //be careful with previously allocated memory
|
|
|
+ if (this->featureExtractor != NULL)
|
|
|
+ delete featureExtractor;
|
|
|
+
|
|
|
+ //feature stuff
|
|
|
+ // which OpponentSIFT implementation to use {NICE, VANDESANDE}
|
|
|
+ std::string opSiftImpl;
|
|
|
+ opSiftImpl = this->conf->gS ( "Descriptor", "implementation", "VANDESANDE" );
|
|
|
+ // read features?
|
|
|
+ bool readfeat;
|
|
|
+ readfeat = this->conf->gB ( "Descriptor", "read", true );
|
|
|
+ // write features?
|
|
|
+ bool writefeat;
|
|
|
+ writefeat = this->conf->gB ( "Descriptor", "write", true );
|
|
|
+
|
|
|
+  // Which OpponentSIFT implementation should be used?
|
|
|
+ LocalFeatureRepresentation *cSIFT = NULL;
|
|
|
+ LocalFeatureRepresentation *writeFeats = NULL;
|
|
|
+ LocalFeatureRepresentation *readFeats = NULL;
|
|
|
+ this->featureExtractor = NULL;
|
|
|
+ if ( opSiftImpl == "NICE" )
|
|
|
+ {
|
|
|
+ if ( _setForTraining )
|
|
|
+ cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGtrain" );
|
|
|
+ else
|
|
|
+ cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGtest" );
|
|
|
+ }
|
|
|
+ else if ( opSiftImpl == "VANDESANDE" )
|
|
|
+ {
|
|
|
+ if ( _setForTraining )
|
|
|
+ cSIFT = new OBJREC::LFColorSande ( this->conf, "LFColorSandeTrain" );
|
|
|
+ else
|
|
|
+ cSIFT = new OBJREC::LFColorSande ( this->conf, "LFColorSandeTest" );
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ fthrow ( Exception, "feattype: %s not yet supported" << opSiftImpl );
|
|
|
+ }
|
|
|
+
|
|
|
+ this->featureExtractor = cSIFT;
|
|
|
+
|
|
|
+ if ( writefeat )
|
|
|
+ {
|
|
|
+ // write the features to a file, if there isn't any to read
|
|
|
+ writeFeats = new LFWriteCache ( this->conf, cSIFT );
|
|
|
+ this->featureExtractor = writeFeats;
|
|
|
+ }
|
|
|
+
|
|
|
+ if ( readfeat )
|
|
|
+ {
|
|
|
+ // read the features from a file
|
|
|
+ if ( writefeat )
|
|
|
+ {
|
|
|
+ readFeats = new LFReadCache ( this->conf, writeFeats, -1 );
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ readFeats = new LFReadCache ( this->conf, cSIFT, -1 );
|
|
|
+ }
|
|
|
+ this->featureExtractor = readFeats;
|
|
|
+ }
|
|
|
+
|
|
|
+ //only set feature stuff to NULL, deletion of the underlying object is done in the destructor
|
|
|
+ if ( cSIFT != NULL )
|
|
|
+ cSIFT = NULL;
|
|
|
+ if ( writeFeats != NULL )
|
|
|
+ writeFeats = NULL;
|
|
|
+ if ( readFeats != NULL )
|
|
|
+ readFeats = NULL ;
|
|
|
+}
|
|
|
+
|
|
|
void FeatureLearningPrototypes::extractFeaturesFromTrainingImages( const OBJREC::MultiDataset *_md, NICE::VVector & examplesTraining )
|
|
|
{
|
|
|
examplesTraining.clear();
|
|
@@ -130,6 +206,12 @@ void FeatureLearningPrototypes::train ( const OBJREC::MultiDataset *_md )
|
|
|
clusterAlgo->cluster ( examplesTraining, prototypes, weights, assignment);
|
|
|
weights.clear();
|
|
|
assignment.clear();
|
|
|
+
|
|
|
+ //normalization
|
|
|
+ for (NICE::VVector::iterator protoIt = prototypes.begin(); protoIt != prototypes.end(); protoIt++)
|
|
|
+ {
|
|
|
+ protoIt->normalizeL1();
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
this->writeInitialCodebook();
|
|
@@ -181,6 +263,52 @@ bool FeatureLearningPrototypes::writeInitialCodebook ( )
|
|
|
}
|
|
|
|
|
|
|
|
|
+void FeatureLearningPrototypes::evaluateCurrentCodebookForGivenFeatures( const NICE::VVector & _features,
|
|
|
+ const NICE::VVector & _positions,
|
|
|
+ NICE::FloatImage & _noveltyImageGaussFiltered,
|
|
|
+ NICE::FloatImage * _noveltyImage )
|
|
|
+{
|
|
|
+ bool wasNoveltyImageGiven ( true );
|
|
|
+ if ( _noveltyImage == NULL )
|
|
|
+ {
|
|
|
+ _noveltyImage = new FloatImage ( _noveltyImageGaussFiltered.width(), _noveltyImageGaussFiltered.height() );
|
|
|
+ wasNoveltyImageGiven = false;
|
|
|
+ }
|
|
|
+
|
|
|
+ _noveltyImageGaussFiltered.set( 0.0 );
|
|
|
+ _noveltyImage->set( 0.0 );
|
|
|
+
|
|
|
+
|
|
|
+ NICE::VVector::const_iterator posIt = _positions.begin();
|
|
|
+ for ( NICE::VVector::const_iterator i = _features.begin();
|
|
|
+ i != _features.end();
|
|
|
+ i++, posIt++)
|
|
|
+ {
|
|
|
+
|
|
|
+ //loop over codebook representatives
|
|
|
+ double minDist ( std::numeric_limits<double>::max() );
|
|
|
+ for (NICE::VVector::const_iterator it = prototypes.begin(); it != prototypes.end(); it++)
|
|
|
+ {
|
|
|
+ //compute distance
|
|
|
+ double tmpDist ( this->distFunction->calculate(*i,*it) );
|
|
|
+ if (tmpDist < minDist)
|
|
|
+ minDist = tmpDist;
|
|
|
+ }
|
|
|
+
|
|
|
+    //take the minimum distance and store it in a float image
|
|
|
+ (*_noveltyImage) ( (*posIt)[0], (*posIt)[1] ) = minDist;
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ //gauss-filtering for nicer visualization
|
|
|
+ float sigma ( 3.0 );
|
|
|
+ FilterT<float, float, float> filter;
|
|
|
+ filter.filterGaussSigmaApproximate ( *_noveltyImage, sigma, & _noveltyImageGaussFiltered );
|
|
|
+
|
|
|
+ if ( ! wasNoveltyImageGiven )
|
|
|
+ delete _noveltyImage;
|
|
|
+}
|
|
|
+
|
|
|
//**********************************************
|
|
|
//
|
|
|
// PUBLIC METHODS
|
|
@@ -192,17 +320,6 @@ FeatureLearningPrototypes::FeatureLearningPrototypes ( const Config *_conf,
|
|
|
const MultiDataset *_md, const std::string & _section )
|
|
|
: FeatureLearningGeneric ( _conf, _section )
|
|
|
{
|
|
|
- //feature stuff
|
|
|
- // which OpponentSIFT implementation to use {NICE, VANDESANDE}
|
|
|
- std::string opSiftImpl;
|
|
|
- opSiftImpl = conf->gS ( "Descriptor", "implementation", "VANDESANDE" );
|
|
|
- // read features?
|
|
|
- bool readfeat;
|
|
|
- readfeat = conf->gB ( "Descriptor", "read", true );
|
|
|
- // write features?
|
|
|
- bool writefeat;
|
|
|
- writefeat = conf->gB ( "Descriptor", "write", true );
|
|
|
-
|
|
|
|
|
|
// define the initial number of clusters our codebook shall contain
|
|
|
initialNumberOfClusters = conf->gI(section, "initialNumberOfClusters", 10);
|
|
@@ -225,48 +342,12 @@ FeatureLearningPrototypes::FeatureLearningPrototypes ( const Config *_conf,
|
|
|
//**********************************************
|
|
|
|
|
|
std::cerr << " SET UP VARIABLES AND METHODS " << std::endl;
|
|
|
-
|
|
|
- // Welche Opponentsift Implementierung soll genutzt werden ?
|
|
|
- LocalFeatureRepresentation *cSIFT = NULL;
|
|
|
- LocalFeatureRepresentation *writeFeats = NULL;
|
|
|
- LocalFeatureRepresentation *readFeats = NULL;
|
|
|
- this->featureExtractor = NULL;
|
|
|
- if ( opSiftImpl == "NICE" )
|
|
|
- {
|
|
|
- cSIFT = new OBJREC::LFonHSG ( conf, "HSGtrain" );
|
|
|
- }
|
|
|
- else if ( opSiftImpl == "VANDESANDE" )
|
|
|
- {
|
|
|
- cSIFT = new OBJREC::LFColorSande ( conf, "LFColorSandeTrain" );
|
|
|
- }
|
|
|
- else
|
|
|
- {
|
|
|
- fthrow ( Exception, "feattype: %s not yet supported" << opSiftImpl );
|
|
|
- }
|
|
|
-
|
|
|
- this->featureExtractor = cSIFT;
|
|
|
-
|
|
|
- if ( writefeat )
|
|
|
- {
|
|
|
- // write the features to a file, if there isn't any to read
|
|
|
- writeFeats = new LFWriteCache ( conf, cSIFT );
|
|
|
- this->featureExtractor = writeFeats;
|
|
|
- }
|
|
|
|
|
|
- if ( readfeat )
|
|
|
- {
|
|
|
- // read the features from a file
|
|
|
- if ( writefeat )
|
|
|
- {
|
|
|
- readFeats = new LFReadCache ( conf, writeFeats, -1 );
|
|
|
- }
|
|
|
- else
|
|
|
- {
|
|
|
- readFeats = new LFReadCache ( conf, cSIFT, -1 );
|
|
|
- }
|
|
|
- this->featureExtractor = readFeats;
|
|
|
- }
|
|
|
+ //feature extraction for initial codebook
|
|
|
+ this->featureExtractor = NULL;
|
|
|
+ this->setFeatureExtractor( true /* set for training */ );
|
|
|
|
|
|
+ //clustering algorithm
|
|
|
this->clusterAlgo = NULL;
|
|
|
this->setClusterAlgo( clusterAlgoString );
|
|
|
|
|
@@ -281,21 +362,17 @@ FeatureLearningPrototypes::FeatureLearningPrototypes ( const Config *_conf,
|
|
|
}
|
|
|
|
|
|
//run the training to initially compute a codebook and stuff like that
|
|
|
- this->train( _md );
|
|
|
-
|
|
|
- //only set feature stuff to NULL, deletion of the underlying object is done in the destructor
|
|
|
- if ( cSIFT != NULL )
|
|
|
- cSIFT = NULL;
|
|
|
- if ( writeFeats != NULL )
|
|
|
- writeFeats = NULL;
|
|
|
- if ( readFeats != NULL )
|
|
|
- readFeats = NULL ;
|
|
|
+ this->train( _md );
|
|
|
|
|
|
//so far, we have not seen any new image
|
|
|
this->newImageCounter = 0;
|
|
|
|
|
|
//TODO stupid
|
|
|
- this->maxValForVisualization = 0.005;
|
|
|
+ this->maxValForVisualization = conf->gD( section, "stupidMaxValForVisualization", 0.005 ) ;
|
|
|
+
|
|
|
+
|
|
|
+ //feature extraction for unseen images
|
|
|
+  this->setFeatureExtractor( false /* do not set for training */ );
|
|
|
}
|
|
|
|
|
|
FeatureLearningPrototypes::~FeatureLearningPrototypes()
|
|
@@ -309,8 +386,10 @@ FeatureLearningPrototypes::~FeatureLearningPrototypes()
|
|
|
delete featureExtractor;
|
|
|
}
|
|
|
|
|
|
-NICE::FloatImage FeatureLearningPrototypes::evaluateCurrentCodebook ( const std::string & _filename , const bool & beforeComputingNewFeatures )
|
|
|
+NICE::FloatImage FeatureLearningPrototypes::evaluateCurrentCodebookByDistance ( const std::string & _filename , const bool & beforeComputingNewFeatures )
|
|
|
{
|
|
|
+ std::cerr << " VISUALIZATION ----- maxValForVisualization: " << maxValForVisualization << std::endl;
|
|
|
+
|
|
|
NICE::ColorImage img( _filename );
|
|
|
if ( b_showTrainingImages )
|
|
|
{
|
|
@@ -329,51 +408,27 @@ NICE::FloatImage FeatureLearningPrototypes::evaluateCurrentCodebook ( const std:
|
|
|
Globals::setCurrentImgFN ( _filename );
|
|
|
featureExtractor->extractFeatures ( img, features, positions );
|
|
|
|
|
|
- FloatImage noveltyImage ( xsize, ysize );
|
|
|
- noveltyImage.set ( 0.0 );
|
|
|
-
|
|
|
- double maxDist ( 0.0 );
|
|
|
-
|
|
|
- NICE::VVector::const_iterator posIt = positions.begin();
|
|
|
- //store feature information in larger data structure
|
|
|
+ //normalization
|
|
|
for ( NICE::VVector::iterator i = features.begin();
|
|
|
i != features.end();
|
|
|
- i++, posIt++)
|
|
|
+ i++)
|
|
|
{
|
|
|
//normalization :)
|
|
|
i->normalizeL1();
|
|
|
-
|
|
|
- //loop over codebook representatives
|
|
|
- double minDist ( std::numeric_limits<double>::max() );
|
|
|
- for (NICE::VVector::const_iterator it = prototypes.begin(); it != prototypes.end(); it++)
|
|
|
- {
|
|
|
- //compute distance
|
|
|
- double tmpDist ( this->distFunction->calculate(*i,*it) );
|
|
|
- if (tmpDist < minDist)
|
|
|
- minDist = tmpDist;
|
|
|
- }
|
|
|
-
|
|
|
- if (minDist > maxDist)
|
|
|
- maxDist = minDist;
|
|
|
-
|
|
|
- //take minimum distance and store in in a float image
|
|
|
-
|
|
|
- noveltyImage ( (*posIt)[0], (*posIt)[1] ) = minDist;
|
|
|
- }
|
|
|
+ }
|
|
|
|
|
|
- //gauss-filtering for nicer visualization
|
|
|
+ FloatImage noveltyImage ( xsize, ysize );
|
|
|
FloatImage noveltyImageGaussFiltered ( xsize, ysize );
|
|
|
- float sigma ( 3.0 );
|
|
|
- FilterT<float, float, float> filter;
|
|
|
- filter.filterGaussSigmaApproximate ( noveltyImage, sigma, &noveltyImageGaussFiltered );
|
|
|
+
|
|
|
+ this->evaluateCurrentCodebookForGivenFeatures( features, positions, noveltyImageGaussFiltered, &noveltyImage );
|
|
|
+
|
|
|
+ double maxDist ( noveltyImage.max() );
|
|
|
double maxFiltered ( noveltyImageGaussFiltered.max() );
|
|
|
|
|
|
std::cerr << "maximum distance of Training images: " << maxDist << std::endl;
|
|
|
std::cerr << "maximum distance of Training images after filtering: " << maxFiltered << std::endl;
|
|
|
if ( beforeComputingNewFeatures )
|
|
|
this->oldMaxDist = maxFiltered;
|
|
|
- //for suitable visualization of scores between zero (known) and one (unknown)
|
|
|
-// noveltyImageGaussFiltered( 0 , 0 ) = std::max<double>(maxDist, 1.0);
|
|
|
|
|
|
|
|
|
//convert float to RGB
|
|
@@ -430,7 +485,7 @@ NICE::FloatImage FeatureLearningPrototypes::evaluateCurrentCodebook ( const std:
|
|
|
|
|
|
int posX ( ( positions[indexOfMostSimFeat] ) [0] );
|
|
|
int posY ( ( positions[indexOfMostSimFeat] ) [1] );
|
|
|
- NICE::Circle circ ( Coord( posX, posY), 2*tmpProtCnt /* radius*/, Color(200,0,255 ) );
|
|
|
+ NICE::Circle circ ( Coord( posX, posY), 2*(tmpProtCnt+1) /* radius*/, Color(200,0,255 ) );
|
|
|
img.draw(circ);
|
|
|
}
|
|
|
|
|
@@ -449,4 +504,100 @@ NICE::FloatImage FeatureLearningPrototypes::evaluateCurrentCodebook ( const std:
|
|
|
}
|
|
|
|
|
|
return noveltyImageGaussFiltered;
|
|
|
-}
|
|
|
+}
|
|
|
+
|
|
|
+NICE::ImageT< int > FeatureLearningPrototypes::evaluateCurrentCodebookByAssignments(const std::string& _filename, const bool& beforeComputingNewFeatures, const bool & _binaryShowLatestPrototype)
|
|
|
+{
|
|
|
+ std::cerr << "evaluateCurrentCodebookByAssignments" << std::endl;
|
|
|
+ NICE::ColorImage img( _filename );
|
|
|
+
|
|
|
+ int xsize ( img.width() );
|
|
|
+ int ysize ( img.height() );
|
|
|
+
|
|
|
+ //variables to store feature information
|
|
|
+ NICE::VVector features;
|
|
|
+ NICE::VVector cfeatures;
|
|
|
+ NICE::VVector positions;
|
|
|
+
|
|
|
+ //compute features
|
|
|
+ Globals::setCurrentImgFN ( _filename );
|
|
|
+ featureExtractor->extractFeatures ( img, features, positions );
|
|
|
+
|
|
|
+ //normalization
|
|
|
+ for ( NICE::VVector::iterator i = features.begin();
|
|
|
+ i != features.end();
|
|
|
+ i++)
|
|
|
+ {
|
|
|
+ //normalization :)
|
|
|
+ i->normalizeL1();
|
|
|
+ }
|
|
|
+
|
|
|
+ std::cerr << "normalization done - now look for nearest clusters for every extracted feature" << std::endl;
|
|
|
+
|
|
|
+ NICE::ImageT< int > clusterImage ( xsize, ysize );
|
|
|
+ clusterImage.set ( 0 );
|
|
|
+
|
|
|
+ NICE::VVector::const_iterator posIt = positions.begin();
|
|
|
+ for ( NICE::VVector::const_iterator i = features.begin();
|
|
|
+ i != features.end();
|
|
|
+ i++, posIt++)
|
|
|
+ {
|
|
|
+
|
|
|
+ //loop over codebook representatives
|
|
|
+ double minDist ( std::numeric_limits<double>::max() );
|
|
|
+ int indexOfNearestCluster ( 0 );
|
|
|
+ int clusterCounter ( 0 );
|
|
|
+ for (NICE::VVector::const_iterator it = this->prototypes.begin(); it != this->prototypes.end(); it++, clusterCounter++)
|
|
|
+ {
|
|
|
+ //compute distance
|
|
|
+ double tmpDist ( this->distFunction->calculate(*i,*it) );
|
|
|
+ if (tmpDist < minDist)
|
|
|
+ {
|
|
|
+ minDist = tmpDist;
|
|
|
+ indexOfNearestCluster = clusterCounter;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ //take minimum distance and store in in a float image
|
|
|
+ //TODO hard coded!!!
|
|
|
+ int noProtoTypes ( this->prototypes.size() -1 );
|
|
|
+
|
|
|
+ for ( int tmpY = (*posIt)[1] - 1; tmpY < (*posIt)[1] + 1; tmpY++)
|
|
|
+ {
|
|
|
+ for ( int tmpX = (*posIt)[0] - 1; tmpX < (*posIt)[0] + 1; tmpX++)
|
|
|
+ {
|
|
|
+ if ( _binaryShowLatestPrototype )
|
|
|
+ {
|
|
|
+ //just a binary image - 1 if newest prototype is nearest - 0 if not
|
|
|
+ if ( indexOfNearestCluster == noProtoTypes)
|
|
|
+ clusterImage ( tmpX, tmpY ) = 1;
|
|
|
+ else
|
|
|
+ clusterImage ( tmpX, tmpY ) = 0;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ //as many different values as current prototypes available
|
|
|
+ clusterImage ( tmpX, tmpY ) = indexOfNearestCluster;
|
|
|
+ }
|
|
|
+ }
|
|
|
+// clusterImage ( (*posIt)[0], (*posIt)[1] ) = indexOfNearestCluster;
|
|
|
+ }
|
|
|
+
|
|
|
+ //show how many clusters we have
|
|
|
+ if ( !_binaryShowLatestPrototype )
|
|
|
+ {
|
|
|
+ int tmpCnt ( 0 );
|
|
|
+ for (NICE::VVector::const_iterator protoIt = prototypes.begin(); protoIt != prototypes.end(); protoIt++, tmpCnt++)
|
|
|
+ {
|
|
|
+ for ( int tmpY = 1 + 2 - 2; tmpY < (2 + 2); tmpY++)
|
|
|
+ {
|
|
|
+ for ( int tmpX = 1 + 5*tmpCnt - 2; tmpX < (1 + 5*tmpCnt + 2); tmpX++)
|
|
|
+ {
|
|
|
+ //Take care, this might go "over" the image
|
|
|
+ clusterImage ( tmpX, tmpY ) = (Ipp8u) tmpCnt;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ std::cerr << " evaluateCurrentCodebookByAssignments done" << std::endl;
|
|
|
+ return clusterImage;
|
|
|
+}
|