瀏覽代碼

changes in ImageNetData and ClassificationResults

bodesheim 12 年之前
父節點
當前提交
9eb701a469

+ 16 - 0
cbaselib/ClassificationResults.cpp

@@ -86,3 +86,19 @@ double ClassificationResults::getBinaryClassPerformance ( int type ) const
 	else
 		return la.calcAveragePrecisionPrecise ( x, y );
 }
+
+double ClassificationResults::getAverageRecognitionRate() const
+{
+  const_iterator i = begin();
+  NICE::Matrix confusion ( i->scores.size(),i->scores.size(),0.0 );
+
+  for ( ; i != end(); i++ )
+  {
+    const ClassificationResult & r = *i;
+    uint classno_estimated = r.classno;
+    uint classno_groundtruth = r.classno_groundtruth;
+    confusion( classno_estimated, classno_groundtruth ) += 1;
+  }
+  confusion.normalizeColumnsL1();
+  return confusion.trace()/confusion.rows();  
+}

+ 3 - 0
cbaselib/ClassificationResults.h

@@ -51,6 +51,9 @@ class ClassificationResults : public std::vector<ClassificationResult>
 	/** please choose the type from the public enum,
 	 *  this only works for binary classification problems */
 	double getBinaryClassPerformance ( int type = PERF_AUC ) const;
+        
+        /** return average recognition rate, useful for multi-class problems */
+        double getAverageRecognitionRate() const;
 };
 
 

+ 9 - 0
classifier/kernelclassifier/progs/libdepend.inc

@@ -0,0 +1,9 @@
+$(call PKG_DEPEND_INT,core/basics)
+$(call PKG_DEPEND_INT,core/algebra)
+$(call PKG_DEPEND_INT,vislearning/math)
+$(call PKG_DEPEND_INT,vislearning/baselib)
+$(call PKG_DEPEND_INT,vislearning/cbaselib)
+$(call PKG_DEPEND_INT,vislearning/classifier/kernelclassifier)
+$(call PKG_DEPEND_INT,vislearning/matlabAccessHighLevel)
+
+

+ 29 - 0
matlabAccessHighLevel/ImageNetData.cpp

@@ -59,6 +59,35 @@ void ImageNetData::preloadData ( const string & fileTag, const string & variable
   cerr << "ImageNetData: data conversion finished." << endl;
 }
 
+void ImageNetData::normalizeData ( const string & normTag ) 
+{
+  if ( normTag.compare("L1") == 0 )
+  {
+    for ( std::vector< SparseVector >::iterator it = XPreload.begin(); it != XPreload.end(); it++ )
+    {
+      it->normalize();
+    }
+    return;
+  } 
+  
+  if ( normTag.compare("L2") == 0 )
+  {
+    double L2norm(0.0);
+    NICE::SparseVector tmpVec;
+    for ( std::vector< SparseVector >::iterator it = XPreload.begin(); it != XPreload.end(); it++ )
+    {
+      tmpVec = *it;
+      tmpVec.multiply(*it);
+      L2norm = tmpVec.sum();
+      it->divide(L2norm);
+    }
+    return;
+  }  
+  
+  cerr << "ImageNetData::normalizeData: invalid normTag... data was not normalized" << endl;
+  
+}
+
 void ImageNetData::loadDataAsLabeledSetVector( OBJREC::LabeledSetVector & lsVector, const std::string & fileTag, const std::string & variableTag )
 {
   sparse_t m_XPreload;

+ 9 - 2
matlabAccessHighLevel/ImageNetData.h

@@ -1,3 +1,4 @@
+
 /** 
 * @file ImageNetData.h
 * @author Erik Rodner
@@ -12,7 +13,7 @@
 #include <string>
 
 #include <core/vector/VectorT.h>
-#include <core/vector/SparseVector.h>
+#include <core/vector/SparseVectorT.h>
 #include <core/matlabAccess/MatFileIO.h>
 
 #include "vislearning/cbaselib/LabeledSet.h"
@@ -52,7 +53,6 @@ class ImageNetData
     */
     void getBatchData ( sparse_t & data, Vector & y, const std::string & fileTag = "train", const std::string & variableTag = "training" );
 
-
     /**
     * @brief load the data specified for later access using the get functions
     *
@@ -61,6 +61,13 @@ class ImageNetData
     */
     void preloadData ( const std::string & fileTag = "val", const std::string & variableTag = "testing" );
     
+    /**
+    * @brief normalize the data given the specified norm
+    *
+    * @param normTag vector norm used in normalization, "L1" or "L2"
+    */
+    void normalizeData ( const std::string & normTag = "L1" ); 
+    
     /**
    * @brief load the data specified for later access using the get functions, give everything as a LabeledSetVector object which is useful for objects of type KernelClassifier (as used in vislearning)
     *

+ 2 - 1
regression/gpregression/RegGaussianProcess.cpp

@@ -137,7 +137,8 @@ void RegGaussianProcess::teach ( KernelData *kernelData, const NICE::Vector & y
 			fthrow(Exception, "KCGPRegression: you have to specify a kernel function !" );
 		}
 	} else {
-		if ( !kernelData->hasCholeskyFactorization() ) 
+
+		if ( !kernelData->hasCholeskyFactorization() )
 			kernelData->updateCholeskyFactorization();
 	}