major bugfix

Erik Rodner, 9 years ago
parent commit 27f0611efc
3 changed files with 16 additions and 33 deletions
  1. GMHIKernelRaw.cpp (+2 -13)
  2. GPHIKRawClassifier.cpp (+11 -16)
  3. GPLikelihoodApprox.cpp (+3 -4)

GMHIKernelRaw.cpp (+2 -13)

@@ -132,19 +132,7 @@ void GMHIKernelRaw::initData ( const std::vector< const NICE::SparseVector *> &_
 
     // pre-allocate the A and B matrices
     this->table_A = allocateTable();
-    this->table_A = new double *[this->num_dimension];
-    this->table_B = new double *[this->num_dimension];
-    for (uint i = 0; i < this->num_dimension; i++)
-    {
-        uint nnz = this->nnz_per_dimension[i];
-        if (nnz>0) {
-            this->table_A[i] = new double [ nnz ];
-            this->table_B[i] = new double [ nnz ];
-        } else {
-            this->table_A[i] = NULL;
-            this->table_B[i] = NULL;
-        }
-    }
+    this->table_B = allocateTable();
 }
 
 double **GMHIKernelRaw::allocateTable() const
@@ -202,6 +190,7 @@ void GMHIKernelRaw::updateTables ( const NICE::Vector _x ) const
       }
     }
 
+
 }
 
 /** multiply with a vector: A*x = y */
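Note: the change above removes a duplicate allocation. table_A was first set via allocateTable() and then immediately overwritten by a hand-written allocation loop (leaking the first allocation), while table_B still used the inline loop; after the fix both tables come from allocateTable(). A minimal sketch of what that helper presumably does, reconstructed from the removed lines (not verbatim repository code):

    // Sketch reconstructed from the removed lines; the actual allocateTable()
    // in GMHIKernelRaw.cpp may differ in details.
    double **GMHIKernelRaw::allocateTable() const
    {
        double **table = new double *[this->num_dimension];
        for (uint i = 0; i < this->num_dimension; i++)
        {
            uint nnz = this->nnz_per_dimension[i];
            // only dimensions that contain non-zero entries need storage
            table[i] = (nnz > 0) ? new double[nnz] : NULL;
        }
        return table;
    }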

GPHIKRawClassifier.cpp (+11 -16)

@@ -201,8 +201,18 @@ void GPHIKRawClassifier::classify ( const NICE::SparseVector * _xstar,
         //this->X_sorted.findFirstLargerInDimension(dim, fval, position);
         GMHIKernelRaw::sparseVectorElement fval_element;
         fval_element.value = fval;
+
+        //std::cerr << "value to search for " << fval << endl;
+        //std::cerr << "data matrix in dimension " << dim << endl;
+        //for (int j = 0; j < nnz; j++)
+        //    std::cerr << dataMatrix[dim][j].value << std::endl;
+
         GMHIKernelRaw::sparseVectorElement *it = upper_bound ( dataMatrix[dim], dataMatrix[dim] + nnz, fval_element );
         position = distance ( dataMatrix[dim], it );
+        // add zero elements
+        if ( fval_element.value > 0.0 )
+            position += nz;
+
 
         bool posIsZero ( position == 0 );
         if ( !posIsZero )
@@ -364,28 +374,13 @@ void GPHIKRawClassifier::train ( const std::vector< const NICE::SparseVector *>
     */
     alpha = (y * (1.0 / eigenMax[0]) );
 
-    //DEBUG!!!
-    if ( this->b_debug && classno == 1 )
-    {
-        std::cerr << "Training for class " << classno << endl;
-        std::cerr << y << std::endl;
-        std::cerr << " alpha before and after linsolve" << classno << endl;
-        std::cerr << "  " << alpha << std::endl;
-    }
-
     solver->solveLin( *gm, y, alpha );
 
-    //DEBUG!!!
-    if ( this->b_debug && classno == 1 )
-    {
-//        std::cerr << "Training for class " << classno << endl;
-        std::cerr << "  " << alpha << std::endl;
-    }
-
     // TODO: get lookup tables, A, B, etc. and store them
     gm->updateTables(alpha);
     double **A = gm->getTableA();
     double **B = gm->getTableB();
+
     precomputedA.insert ( pair<uint, PrecomputedType> ( classno, A ) );
     precomputedB.insert ( pair<uint, PrecomputedType> ( classno, B ) );
   }
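Note: the classify() change accounts for feature values that are not stored in the sparse column. upper_bound only ranks the query among the nnz explicitly stored non-zero values, so for a positive query value the position is additionally shifted by the number of implicit zero entries (nz). A standalone sketch of that logic, with simplified types and under the assumption that nz counts the zero-valued examples of the dimension:

    #include <algorithm>
    #include <iterator>

    // Sketch with simplified types (double instead of sparseVectorElement);
    // column holds the nnz non-zero feature values of one dimension in
    // ascending order, nz is the assumed count of implicit zero entries.
    unsigned int rankInDimension(const double *column, unsigned int nnz,
                                 unsigned int nz, double fval)
    {
        unsigned int position =
            std::distance(column, std::upper_bound(column, column + nnz, fval));
        if (fval > 0.0)   // the implicit zeros sort below any positive value
            position += nz;
        return position;
    }

The train() hunk only drops the temporary debug output around solveLin(); the alpha initialization and the lookup-table update via gm->updateTables(alpha) are unchanged.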

GPLikelihoodApprox.cpp (+3 -4)

@@ -191,16 +191,15 @@ void GPLikelihoodApprox::computeAlphaDirect(const OPTIMIZATION::matrix_type & _x
      *  This reduces the number of iterations by 5 or 8
      */
     NICE::Vector alpha;
-    
     alpha = (binaryLabels[classCnt] * (1.0 / _eigenValues[0]) );
-    
+
     if ( verbose )
       std::cerr << "Using the standard solver ..." << std::endl;
 
     t.start();
     linsolver->solveLin ( *ikm, binaryLabels[classCnt], alpha );
     t.stop();
-   
+
     alphas.insert( std::pair<uint, NICE::Vector> ( classCnt, alpha) );
   }  
   
@@ -429,4 +428,4 @@ void GPLikelihoodApprox::setVerbose( const bool & _verbose )
 void GPLikelihoodApprox::setDebug( const bool & _debug )
 {
   this->debug = _debug;
-}
+}
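Note: apart from whitespace and the trailing newline at end of file, this file is unchanged. The surrounding context shows the warm start that the comment credits with saving 5 to 8 solver iterations: alpha is initialized to the label vector scaled by the inverse of the largest eigenvalue before being passed to solveLin(). A minimal sketch of that initialization, under the assumption that solveLin() uses alpha as its initial iterate:

    #include <vector>

    // Sketch of the warm start alpha0 = y / lambda_max: for K * alpha = y this
    // guess is already exact along the leading eigendirection of K, which is
    // presumably why the iterative solver needs fewer iterations than when it
    // starts from zero.
    std::vector<double> warmStart(const std::vector<double> &y, double lambdaMax)
    {
        std::vector<double> alpha0(y.size());
        for (std::size_t i = 0; i < y.size(); ++i)
            alpha0[i] = y[i] / lambdaMax;
        return alpha0;
    }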