@@ -1,6 +1,6 @@
 /**
-* @file KCNullSpaceNovelty.cpp
-* @brief Novelty detection with kernel null space methods (Kernel Null Foley Sammon Transform - KNFST)
+* @file KCNullSpace.cpp
+* @brief Classification and novelty detection with kernel null space methods (Kernel Null Foley Sammon Transform - KNFST)
 * @author Paul Bodesheim
 * @date 26/11/2012
@@ -10,7 +10,7 @@
 #include "core/vector/Algorithms.h"
-#include "KCNullSpaceNovelty.h"
+#include "KCNullSpace.h"
 #include <limits>
 #undef DEBUG
@@ -19,7 +19,7 @@ using namespace NICE;
 using namespace std;
 using namespace OBJREC;
-KCNullSpaceNovelty::KCNullSpaceNovelty( const Config *conf, Kernel *kernelFunction, const string & section )
+KCNullSpace::KCNullSpace( const Config *conf, Kernel *kernelFunction, const string & section )
     : KernelClassifier ( conf, kernelFunction )
 {
   this->maxClassNo = 0;
@@ -27,7 +27,7 @@ KCNullSpaceNovelty::KCNullSpaceNovelty( const Config *conf, Kernel *kernelFuncti
 }
-KCNullSpaceNovelty::KCNullSpaceNovelty( const KCNullSpaceNovelty &vcova ): KernelClassifier(vcova)
+KCNullSpace::KCNullSpace( const KCNullSpace &vcova ): KernelClassifier(vcova)
 {
   verbose = vcova.verbose;
@@ -35,8 +35,7 @@ KCNullSpaceNovelty::KCNullSpaceNovelty( const KCNullSpaceNovelty &vcova ): Kerne
   oneClassSetting = vcova.oneClassSetting;
   trainingSetStatistic.clear();
-  std::map<int,int>::iterator it;
-  for ( it = ( (std::map<int,int>)vcova.trainingSetStatistic ).begin(); it != vcova.trainingSetStatistic.end(); it++ )
+  for ( std::map<int,int>::const_iterator it = vcova.trainingSetStatistic.begin(); it != vcova.trainingSetStatistic.end(); it++ )
   {
     trainingSetStatistic.insert(pair<int,int>( (*it).first,(*it).second ));
   }
@@ -47,14 +46,14 @@ KCNullSpaceNovelty::KCNullSpaceNovelty( const KCNullSpaceNovelty &vcova ): Kerne
   {
     for(int j = 0; j < (int)vcova.nullProjectionDirections.cols(); j++)
     {
-      nullProjectionDirections(i,j) = vcova.nullProjectionDirections(i,j);
+        nullProjectionDirections(i,j) = vcova.nullProjectionDirections(i,j);
     }
   }
   targetPoints.clear();
-  for(int i = 0; i < (int)vcova.targetPoints.size(); i++)
+  for( std::map<int,NICE::Vector>::const_iterator it = vcova.targetPoints.begin(); it != vcova.targetPoints.end(); it++ )
   {
-    targetPoints.push_back( NICE::Vector(vcova.targetPoints[i]) );
+    targetPoints.insert(pair<int,NICE::Vector>( (*it).first,(*it).second ));
   }
   eigenBasis.resize( vcova.eigenBasis.rows(),vcova.eigenBasis.cols() );
@@ -68,71 +67,77 @@ KCNullSpaceNovelty::KCNullSpaceNovelty( const KCNullSpaceNovelty &vcova ): Kerne
   }
 }
-KCNullSpaceNovelty::~KCNullSpaceNovelty()
+KCNullSpace::~KCNullSpace()
 {
 }
-void KCNullSpaceNovelty::teach ( KernelData *kernelData, const NICE::Vector & y )
+void KCNullSpace::teach ( KernelData *kernelData, const NICE::Vector & y )
 {
   NICE::Vector labels(y);
-  maxClassNo = (int)labels.Max();
+  int maxLabel( (int)labels.Max());
   /** check if we are in a one-class setting */
-  int minClassNo = (int)labels.Min();
-  if (maxClassNo == minClassNo)
+  int minLabel( (int)labels.Min());
+  if (maxLabel == minLabel)
   {
     oneClassSetting = true;
+    maxClassNo = 1;
     if (verbose)
-      std::cerr << "KCNullSpaceNovelty::teach: one-class setting" << std::endl;
-    computeTrainingSetStatistic(labels);
+      std::cerr << "KCNullSpace::teach: one-class setting" << std::endl;
     /** one-class setting: add a row and a column of zeros to the kernel matrix representing dot products with origin in kernel feature space*/
     kernelData->increase_size_by_One();
     kernelData->getKernelMatrix() (kernelData->getKernelMatrix().rows()-1, kernelData->getKernelMatrix().cols()-1) = 0.0;
-    labels.append(minClassNo+1);
+    labels.append(minLabel+1);
+    computeTrainingSetStatistic(labels);
+
   }
   else
   {
     oneClassSetting = false;
     if (verbose)
-      std::cerr << "KCNullSpaceNovelty::teach: multi-class setting" << std::endl;
+      std::cerr << "KCNullSpace::teach: multi-class setting" << std::endl;
     computeTrainingSetStatistic(labels);
+    maxClassNo = trainingSetStatistic.size()-1; // number of classes, start counting at 0
   }
-
-  if (verbose)
-    std::cerr << "KCNullSpaceNovelty::teach: compute null projection directions..." << std::endl;
+
   computeNullProjectionDirections(kernelData,labels);
-  if (verbose)
-    std::cerr << "KCNullSpaceNovelty::teach: compute target points..." << std::endl;
+
   computeTargetPoints(kernelData,labels);
+
+  if (oneClassSetting)
+  {
+    /** remove the value that corresponds to the dot product with the origin in the kernel feature space, since this is always equal to zero */
+    nullProjectionDirections.deleteRow(nullProjectionDirections.rows()-1);
+  }
 }
-std::map<int,int> KCNullSpaceNovelty::getTrainingSetStatistic()
+std::map<int,int> * KCNullSpace::getTrainingSetStatistic()
 {
-  return trainingSetStatistic;
+  return &trainingSetStatistic;
 }
-NICE::Matrix KCNullSpaceNovelty::getNullProjectionDirections()
+NICE::Matrix KCNullSpace::getNullProjectionDirections()
 {
   return nullProjectionDirections;
 }
-NICE::VVector KCNullSpaceNovelty::getTargetPoints()
+std::map<int,NICE::Vector> * KCNullSpace::getTargetPoints()
 {
-  return targetPoints;
+  return &targetPoints;
 }
-int KCNullSpaceNovelty::getNullSpaceDimension()
+int KCNullSpace::getNullSpaceDimension()
 {
   return dimNullSpace;
 }
-bool KCNullSpaceNovelty::isOneClass()
+bool KCNullSpace::isOneClass()
 {
   return oneClassSetting;
 }
-void KCNullSpaceNovelty::computeTrainingSetStatistic(const NICE::Vector & y)
+void KCNullSpace::computeTrainingSetStatistic(const NICE::Vector & y)
 {
   trainingSetStatistic.clear();
   std::map<int,int>::iterator it;
@@ -150,8 +155,11 @@ void KCNullSpaceNovelty::computeTrainingSetStatistic(const NICE::Vector & y)
   }
 }
-void KCNullSpaceNovelty::computeBasisUsingKernelPCA(const KernelData *kernelData)
+void KCNullSpace::computeBasisUsingKernelPCA(const KernelData *kernelData)
 {
+  if (verbose)
+    std::cerr << "KCNullSpace::computeBasisUsingKernelPCA: compute kernel PCA basis..." << std::endl;
+
   NICE::Matrix K (kernelData->getKernelMatrix());
   /** let K represent dot products of zero mean data in kernel feature space */
@@ -160,8 +168,13 @@ void KCNullSpaceNovelty::computeBasisUsingKernelPCA(const KernelData
   /** get eigenvectors and eigenvalues (descreasing order) of centered kernel matrix*/
   NICE::Matrix eigenVectors(K.rows(), K.cols(), 0.0);
   NICE::Vector eigenValues(K.rows(), 0.0);
+  K.addIdentity(1.0);
   eigenvectorvalues(K, eigenVectors, eigenValues);
-
+  eigenValues -= 1.0;
+// NICE::GMCovariance gm(&K);
+// NICE::EigValuesTRLAN ev;
+// ev.getEigenvalues(gm, eigenValues, eigenVectors, K.rows());
+
   /** only use eigenvectors of non-zero eigenvalues */
   int j(0);
   for (size_t i=0; i<K.rows(); i++)
@@ -187,21 +200,25 @@ void KCNullSpaceNovelty::computeBasisUsingKernelPCA(const KernelData
   }
   eigenBasis = eigenVectors;
+
+  if (verbose)
+    std::cerr << "KCNullSpace::computeBasisUsingKernelPCA: computation done" << std::endl;
 }
-void KCNullSpaceNovelty::centerKernelMatrix(NICE::Matrix & kernelMatrix)
+void KCNullSpace::centerKernelMatrix(NICE::Matrix & kernelMatrix)
 {
   NICE::Matrix onesK (kernelMatrix.rows(), kernelMatrix.cols(), 1.0/kernelMatrix.rows());
   kernelMatrix = kernelMatrix - onesK*kernelMatrix - kernelMatrix*onesK + onesK*kernelMatrix*onesK;
 }
-void KCNullSpaceNovelty::computeNullProjectionDirections ( const KernelData *kernelData, const NICE::Vector & y )
+void KCNullSpace::computeNullProjectionDirections ( const KernelData *kernelData, const NICE::Vector & y )
 {
-  /** obtain Kernel PCA basis */
   if (verbose)
-    std::cerr << "KCNullSpaceNovelty::computeNullProjectionDirections: compute kernel PCA basis..." << std::endl;
+    std::cerr << "KCNullSpace::computeNullProjectionDirections: compute null projection directions..." << std::endl;
+
+  /** obtain Kernel PCA basis */
   computeBasisUsingKernelPCA(kernelData);
   /** set matrix IM=(I-M) with I being the unit matrix and M being a matrix with all entries equal to 1/n where n is the number of training samples */
@@ -214,7 +231,7 @@ void KCNullSpaceNovelty::computeNullProjectionDirections ( const KernelData *ker
   for (size_t c=0; c<IL.cols(); c++)
   {
     /** if sample with index r is in the same class as sample with index c, then insert the value 1/numClassSamples */
-    for (size_t r=0; IL.rows(); r++)
+    for (size_t r=0; r<IL.rows(); r++)
     {
       if ( y(r) == y(c) )
       {
@@ -234,7 +251,15 @@ void KCNullSpaceNovelty::computeNullProjectionDirections ( const KernelData *ker
   /** get eigenvectors and eigenvalues (descreasing order) of T */
   NICE::Matrix eigenVectors(T.rows(), T.cols(), 0.0);
   NICE::Vector eigenValues(T.rows(), 0.0);
+  T.addIdentity(1.0);
   eigenvectorvalues(T, eigenVectors, eigenValues);
+  eigenValues -= 1.0;
+// NICE::GMCovariance gm(&T);
+// NICE::EigValuesTRLAN ev;
+// ev.getEigenvalues(gm, eigenValues, eigenVectors, T.rows());
+
+  if (verbose)
+    std::cerr << "T: " << T.rows() << " x " << T.rows() << " nan: " << T.containsNaN()<< std::endl;
   /** only use eigenvectors of zero eigenvalues (null space!!!) but at least one eigenvector according to the smallest eigenvalue (therefore start at index i=T.rows()-2)*/
   for (int i=T.rows()-2; i>=0; i--)
@@ -248,13 +273,16 @@ void KCNullSpaceNovelty::computeNullProjectionDirections ( const KernelData *ker
   /** compute null projection directions */
   nullProjectionDirections = IM*eigenVectors;
   dimNullSpace = nullProjectionDirections.cols();
+
   if (verbose)
-    std::cerr << "KCNullSpaceNovelty::computeNullProjectionDirections: computation done" << std::endl;
+    std::cerr << "KCNullSpace::computeNullProjectionDirections: computation done" << std::endl;
 }
-void KCNullSpaceNovelty::computeTargetPoints ( const KernelData *kernelData, const NICE::Vector & y )
+void KCNullSpace::computeTargetPoints ( const KernelData *kernelData, const NICE::Vector & y )
 {
-
+  if (verbose)
+    std::cerr << "KCNullSpace::computeTargetPoints: compute target points..." << std::endl;
+
   targetPoints.clear();
   NICE::Vector targetPoint (dimNullSpace, 0.0);
   int classLabel(0);
@@ -277,21 +305,20 @@ void KCNullSpaceNovelty::computeTargetPoints ( const KernelData *kernelData, con
     targetPoint /= (*it).second;
     /** we only have one target point in an one-class setting */
-    targetPoints.push_back(targetPoint);
+    targetPoints.insert(pair<int,NICE::Vector>( classLabel,targetPoint));
   }
   else
   {
     /** create averaging vectors for each class, necessary to compute target points at the end of this method */
-    std::vector<NICE::Vector> averagingVectors;
+    std::map<int,NICE::Vector> averagingVectors;
     averagingVectors.clear();
     NICE::Vector averagingVector(y.size(),0.0);
-    /** insert one averaging vector and one target point vector for each class */
-    std::map<int,int>::iterator it;
+    /** insert one averaging vector for each class */
     int numClassSamples(0);
-    for ( it = trainingSetStatistic.begin(); it != trainingSetStatistic.end(); it++ )
+    for ( std::map<int,int>::iterator it = trainingSetStatistic.begin(); it != trainingSetStatistic.end(); it++ )
     {
       /** create current averaging vector */
@@ -310,60 +337,87 @@ void KCNullSpaceNovelty::computeTargetPoints ( const KernelData *kernelData, con
       }
       /** insert averaging vector for current class*/
-      averagingVectors.push_back(averagingVector);
-      /** insert a null vector for the target point of the current class */
-      targetPoints.push_back(targetPoint);
+      averagingVectors.insert(pair<int,NICE::Vector>( classLabel,averagingVector));
     }
     /** compute target points using previously created averaging vectors: average for each class the projections of the class samples in the null space */
-    for ( size_t i = 0 ; i < targetPoints.size(); i++ )
+    for ( std::map<int,NICE::Vector>::iterator it = averagingVectors.begin(); it != averagingVectors.end(); it++ )
     {
-      targetPoints[i] = nullProjectionDirections.transpose() * kernelData->getKernelMatrix() * averagingVectors[i];
+      targetPoint = nullProjectionDirections.transpose() * kernelData->getKernelMatrix() * (*it).second;
+      targetPoints.insert(pair<int,NICE::Vector>( (*it).first,targetPoint));
     }
   }
   if (verbose)
-    std::cerr << "KCNullSpaceNovelty::computeTargetPoints: computation done" << std::endl;
+    std::cerr << "KCNullSpace::computeTargetPoints: computation done" << std::endl;
 }
-ClassificationResult KCNullSpaceNovelty::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
+ClassificationResult KCNullSpace::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
 {
   if ( targetPoints.size() <= 0 )
     fthrow(Exception, "The classifier was not trained with training data (use teach(...))");
+  if (oneClassSetting)
+  {
+    return noveltyDetection ( kernelVector, kernelSelf );
+  }
+
   NICE::Vector projection(dimNullSpace,0.0);
   projection = nullProjectionDirections.transpose() * kernelVector;
-  FullVector scores ( trainingSetStatistic.size() );
-  scores.set(0);
+  FullVector scores ( maxClassNo+1 );
+  scores.set(- std::numeric_limits<double>::max());
-  std::map<int,int>::iterator it;
-  int iter(0);
-  for ( it = ( (std::map<int,int>)trainingSetStatistic ).begin(); it != trainingSetStatistic.end(); it++ )
+  int iter(0);
+  for ( std::map<int,NICE::Vector>::const_iterator it = targetPoints.begin(); it != targetPoints.end(); it++ )
   {
-    scores[iter] = -(targetPoints[iter] - projection).normL2();
+    scores[iter] = 1.0-(it->second - projection).normL2();
     iter++;
   }
   ClassificationResult r ( scores.maxElement(), scores );
-
   return r;
 }
-KCNullSpaceNovelty* KCNullSpaceNovelty::clone(void) const
+ClassificationResult KCNullSpace::noveltyDetection ( const NICE::Vector & kernelVector, double kernelSelf ) const
 {
-  KCNullSpaceNovelty *classifier = new KCNullSpaceNovelty( *this );
+  if ( targetPoints.size() <= 0 )
+    fthrow(Exception, "The classifier was not trained with training data (use teach(...))");
+
+  NICE::Vector projection(dimNullSpace,0.0);
+  projection = nullProjectionDirections.transpose() * kernelVector;
+
+  FullVector scores ( 2 );
+  scores.set(- std::numeric_limits<double>::max());
+  double tmp_score(0.0);
+
+  for ( std::map<int,NICE::Vector>::const_iterator it = targetPoints.begin(); it != targetPoints.end(); it++ )
+  {
+    tmp_score = 1.0-(it->second - projection).normL2();
+    if (tmp_score > scores[1])
+    {
+      scores[1] = tmp_score;
+    }
+  }
+
+  ClassificationResult r ( scores.maxElement(), scores );
+  return r;
+}
+
+KCNullSpace* KCNullSpace::clone(void) const
+{
+  KCNullSpace *classifier = new KCNullSpace( *this );
   return classifier;
 }
-void KCNullSpaceNovelty::clear()
+void KCNullSpace::clear()
 {
   //nothing to clear
 }
-void KCNullSpaceNovelty::restore(std::istream& ifs, int type)
+void KCNullSpace::restore(std::istream& ifs, int type)
 {
   ifs.precision (numeric_limits<double>::digits10 + 1);
   ifs >> maxClassNo;
@@ -390,14 +444,15 @@ void KCNullSpaceNovelty::restore(std::istream& ifs, int type)
   ifs >> k;
   for (int i=0; i<k; i++)
   {
+    ifs >> classLabel;
     ifs >> targetPoint;
-    targetPoints.push_back(targetPoint);
+    targetPoints.insert( pair<int,NICE::Vector>(classLabel,targetPoint) );
   }
   KernelClassifier::restore(ifs,type);
 }
-void KCNullSpaceNovelty::store(std::ostream& ofs, int type) const
+void KCNullSpace::store(std::ostream& ofs, int type) const
 {
   ofs.precision (numeric_limits<double>::digits10 + 1);
@@ -408,8 +463,7 @@ void KCNullSpaceNovelty::store(std::ostream& ofs, int type) const
   ofs << eigenBasis << endl;
   ofs << trainingSetStatistic.size() << endl;
-  std::map<int,int>::iterator it;
-  for (it = ( (std::map<int,int>)trainingSetStatistic ).begin() ; it != trainingSetStatistic.end(); it++)
+  for (std::map<int,int>::const_iterator it = trainingSetStatistic.begin() ; it != trainingSetStatistic.end(); it++)
   {
     ofs << (*it).first << endl;
     ofs << (*it).second << endl;
@@ -418,9 +472,11 @@ void KCNullSpaceNovelty::store(std::ostream& ofs, int type) const
   ofs << nullProjectionDirections << endl;
   ofs << targetPoints.size() << endl;
-  for (size_t k=0; k<targetPoints.size(); k++)
+  for (std::map<int,NICE::Vector>::const_iterator it = targetPoints.begin() ; it != targetPoints.end(); it++)
   {
-    ofs << targetPoints[k] << endl;
+
+    ofs << (*it).first << endl;
+    ofs << (*it).second << endl;
   }
   KernelClassifier::store(ofs,type);
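
For context, a minimal usage sketch of the renamed class after this change (not part of the patch). It reflects only what the diff shows: getTrainingSetStatistic() and getTargetPoints() now return pointers to maps keyed by class label, teach() switches to the one-class setting when all training labels are equal, and classifyKernel() falls back to the new noveltyDetection() in that case. The include path, the section string, and the way conf, kernelFunction, the KernelData object and the kernel values are obtained are assumptions standing in for application-specific setup; only the KCNullSpace calls mirror the code above.

// Hedged usage sketch; the include path is an assumption and may differ in the actual source tree.
#include <iostream>
#include <map>
#include <string>
#include "vislearning/classifier/kernelclassifier/KCNullSpace.h"

using namespace OBJREC;

// conf, kernelFunction, section, trainKernel (a precomputed kernel matrix wrapped in KernelData),
// trainLabels and the test kernel values are assumed to be prepared by the caller.
void exampleUsage ( const NICE::Config *conf, Kernel *kernelFunction, const std::string & section,
                    KernelData *trainKernel, const NICE::Vector & trainLabels,
                    const NICE::Vector & testKernelVector, double testKernelSelf )
{
  KCNullSpace classifier ( conf, kernelFunction, section );

  // distinct labels train a multi-class model; a single repeated label triggers the one-class setting
  classifier.teach ( trainKernel, trainLabels );

  // after teach(), one target point per class is stored, keyed by class label
  std::map<int,NICE::Vector> *targets = classifier.getTargetPoints();
  std::cerr << "null space dimension: " << classifier.getNullSpaceDimension()
            << ", target points: " << targets->size()
            << ", one-class: " << classifier.isOneClass() << std::endl;

  // in the one-class setting classifyKernel() routes to the new noveltyDetection() internally
  ClassificationResult result = classifier.classifyKernel ( testKernelVector, testKernelSelf );
  (void) result; // scores hold 1.0 minus the distance to each target point, as in classifyKernel() above
}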