@@ -1,556 +0,0 @@
-#ifdef NICE_USELIB_CPPUNIT
-
-#include <string>
-#include <exception>
-#include <iostream>
-#include <fstream>
-
-//----------
-
-#include <core/basics/Timer.h>
-
-//----------
-
-#include <vislearning/cbaselib/ClassificationResults.h>
-#include <vislearning/classifier/kernelclassifier/KCGPRegOneVsAll.h>
-
-//----------
-
-#include "gp-hik-exp/GPHIKClassifierNICE.h"
-
-//----------
-
-#include "TestGPHIKClassifier.h"
-
-
-const bool verbose = false;
-const bool verboseStartEnd = true;
-
-using namespace OBJREC;
-using namespace NICE;
-using namespace std;
-
-CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKClassifier );
-
-void TestGPHIKClassifier::setUp() {
-}
-
-void TestGPHIKClassifier::tearDown() {
-}
-
-void myClassifierTest( GPHIKClassifierNICE & classifier, const Matrix & mX, const Vector & vY )
-{
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierTest ===================== " << std::endl;
-
-  Examples examples;
-
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 1 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      examples.push_back ( pair<int, Example> ( vY[i], example ) );
-    }
-
-  FeaturePool fp; // will be ignored
-
-  if ( verbose )
-    std::cerr << "preparation done." << std::endl;
-
-  if ( verbose )
-    std::cerr << "learning ..." << std::endl;
-  classifier.train ( fp, examples );
-
-  if ( verbose )
-    std::cerr << "testing ..." << std::endl;
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 0 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      ClassificationResult r = classifier.classify ( example );
-      if (verbose)
-      {
-        r.scores >> std::cerr;
-        std::cerr << "predicted uncertainty: " << r.uncertainty << std::endl;
-      }
-    }
-
-  examples.clean();
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierTest done ===================== " << std::endl;
-
-}
-
-void myClassifierStoreRestoreTest( GPHIKClassifierNICE & classifier, const Matrix & mX, const Vector & vY )
-{
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierStoreRestoreTest ===================== " << std::endl;
-
-  Examples examples;
-
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 1 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      examples.push_back ( pair<int, Example> ( vY[i], example ) );
-    }
-
-  FeaturePool fp; // will be ignored
-
-  if ( verbose )
-    std::cerr << "preparation done." << std::endl;
-
-  if ( verbose )
-    std::cerr << "learning ..." << std::endl;
-  classifier.train ( fp, examples );
-
-  if ( verbose )
-    std::cerr << "storing ..." << std::endl;
-  //test the store-functionality
-  string destination("/tmp/GPHIK_store.txt");
-
-  std::filebuf fb;
-  fb.open (destination.c_str(),ios::out);
-  std::ostream os(&fb);
-//
-  classifier.store(os);
-//
-  fb.close();
-
-  if ( verbose )
-    std::cerr << "loading ..." << std::endl;
-
-  Config confTmp;
-  GPHIKClassifierNICE classifierRestored(&confTmp);
-
-  std::filebuf fbIn;
-  fbIn.open (destination.c_str(),ios::in);
-  std::istream is(&fbIn);
-//
-  classifierRestored.restore(is);
-//
-  fbIn.close();
-
-  if ( verbose )
-    std::cerr << "testing ..." << std::endl;
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 0 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      ClassificationResult rOrig = classifier.classify ( example );
-      ClassificationResult rRestored = classifierRestored.classify ( example );
-
-      //scores are of type FullVector
-      //we use the [] operator, since there are no iterators given in FullVector.h
-      bool equal(true);
-      for (int i = 0; i< rOrig.scores.size(); i++)
-      {
-        if ( fabs(rOrig.scores[i] - rRestored.scores[i]) > 1e-6)
-        {
-          equal = false;
-          break;
-        }
-      }
-
-      CPPUNIT_ASSERT_EQUAL ( equal, true );
-    }
-
-  examples.clean();
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierStoreRestoreTest done ===================== " << std::endl;
-
-}
-
-void myClassifierILTest( OBJREC::GPHIKClassifierNICE & classifierRetrain,
-                         OBJREC::GPHIKClassifierNICE & classifierIL,
-                         const Matrix & mX,
-                         const Vector & vY
-                       )
-{
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierILTest ===================== " << std::endl;
-
-  Examples examples;
-
-  if (verbose)
-    std::cerr << "vY: " << vY << std::endl;
-
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-  {
-    if ( i % 4 == 1 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      examples.push_back ( pair<int, Example> ( vY[i], example ) );
-    }
-  }
-
-  if (verbose)
-    std::cerr << "examples.size(): " << examples.size() << std::endl;
-
-  FeaturePool fp; // will be ignored
-
-  if ( verbose )
-    std::cerr << "preparation done." << std::endl;
-
-  if ( verbose )
-    std::cerr << "learning ..." << std::endl;
-  Timer t;
-  t.start();
-  classifierIL.train ( fp, examples );
-  t.stop();
-  std::cerr << "Time used for initial training: " << t.getLast() << std::endl;
-
-  //choose next example(s)
-
-  int i_numExamplesToAdd ( 2 );
-
-  Examples newExamples;
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-  {
-    if ( i % 4 == 3 )
-    {
-      Example example;
-      example.svec = new SparseVector();
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      newExamples.push_back ( std::pair<int, Example> ( vY[i], example ) );
-    }
-
-    if ( newExamples.size() == i_numExamplesToAdd )
-      break;
-  }
-
-  i_numExamplesToAdd = std::min ( i_numExamplesToAdd, (int) newExamples.size() );
-  //add the new features to feature pool needed for batch training
-  for (uint i = 0; i < i_numExamplesToAdd; i++)
-  {
-    examples.push_back( newExamples[i] );
-  }
-
-  std::cerr << std::endl << " =============== " << std::endl << "We train the second classifier from scratch with the additional new example" << std::endl;
-  t.start();
-
-  classifierRetrain.train ( fp, examples );
-
-  t.stop();
-  std::cerr << "Time used for batch training: " << t.getLast() << std::endl;
-
-  if ( verbose )
-    std::cerr << std::endl << " =============== " << std::endl << "incremental learning ..." << std::endl;
-
-  // add them to classifierIL
-  t.start();
-  if ( verbose )
-    std::cerr << "We add " << i_numExamplesToAdd << " new examples" << std::endl;
-  if ( i_numExamplesToAdd > 1 )
-    classifierIL.addMultipleExamples( newExamples );
-  else if ( i_numExamplesToAdd == 1 )
-    classifierIL.addExample( newExamples[0].second, newExamples[0].first);
-  else
-  {
-    //nothing to do
-  }
-
-  t.stop();
-  std::cerr << "Time used for incremental training: " << t.getLast() << std::endl;
-
-
-
-  //evaluate both and compare the resulting scores
-  if ( verbose )
-    std::cerr << "testing ..." << std::endl;
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 0 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      ClassificationResult resultIL = classifierIL.classify ( example );
-      ClassificationResult resultBatch = classifierRetrain.classify ( example );
-
-      if (verbose)
-      {
-        std::cerr << "result of IL classifier: " << std::endl;
-        resultIL.scores >> std::cerr;
-
-        std::cerr << "result of batch classifier: " << std::endl;
-        resultBatch.scores >> std::cerr;
-      }
-
-      //scores are of type FullVector
-      //we use the [] operator, since there are no iterators given in FullVector.h
-      bool equal(true);
-      for (int i = 0; i< resultIL.scores.size(); i++)
-      {
-        if ( fabs(resultIL.scores[i] - resultBatch.scores[i]) > 10e-3)
-        {
-          std::cerr << " resultIL.scores[i]: " << resultIL.scores[i] << " resultBatch.scores[i]: " << resultBatch.scores[i] << std::endl;
-          equal = false;
-          break;
-        }
-      }
-
-      CPPUNIT_ASSERT_EQUAL ( equal, true );
-    }
-
-  examples.clean();
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::myClassifierILTest done ===================== " << std::endl;
-}
-
-void TestGPHIKClassifier::testGPHIKClassifier()
-{
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::testGPHIKClassifier ===================== " << std::endl;
-
-  NICE::Config conf;
-  conf.sD( "GPHIKClassifier", "noise", 0.01 );
-  conf.sD( "GPHIKClassifier", "parameter_lower_bound", 0.5 );
-  conf.sD( "GPHIKClassifier", "parameter_upper_bound", 3.5 );
-//  conf.sS( "GPHIKClassifier", "optimization_method", "none");
-//  conf.sD( "GPHIKClassifier", "performOptimizationAfterIncrement", false );
-  conf.sS( "GPHIKClassifier", "optimization_method", "downhillsimplex");
-  conf.sD( "GPHIKClassifier", "performOptimizationAfterIncrement", true );
-  conf.sB( "GPHIKClassifier", "uncertaintyPredictionForClassification", false);
-
-  OBJREC::GPHIKClassifierNICE * classifier = new OBJREC::GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-
-  NICE::Matrix mX;
-  NICE::Vector vY;
-  NICE::Vector vY_multi;
-
-//  ifstream ifs ("toyExample1.data", ios::in);
-//  ifstream ifs ("toyExampleLargeScale.data", ios::in);
-  ifstream ifs ("toyExampleLargeLargeScale.data", ios::in);
-  CPPUNIT_ASSERT ( ifs.good() );
-  ifs >> mX;
-  ifs >> vY;
-  ifs >> vY_multi;
-  ifs.close();
-
-  if (verbose)
-  {
-    std::cerr << "data loaded: mX" << std::endl;
-    std::cerr << mX << std::endl;
-    std::cerr << "vY: " << std::endl;
-    std::cerr << vY << std::endl;
-    std::cerr << "vY_multi: " << std::endl;
-    std::cerr << vY_multi << std::endl;
-  }
-
-  if ( verbose )
-    std::cerr << "Binary classification test " << std::endl;
-
-  myClassifierTest ( *classifier, mX, vY );
-
-  // ... we remove nothing here since we are only interested in store and restore :)
-  myClassifierStoreRestoreTest ( *classifier, mX, vY );
-
-  // ... remove previously computed things and start again, this time with incremental settings
-  if (classifier != NULL)
-    delete classifier;
-
-  classifier = new OBJREC::GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-  OBJREC::GPHIKClassifierNICE * classifierBatch = new OBJREC::GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-
-  myClassifierILTest( *classifierBatch, *classifier, mX, vY );
-
-  if (classifier != NULL)
-    delete classifier;
-  if (classifierBatch != NULL)
-    delete classifierBatch;
-
-  classifier = new OBJREC::GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-  classifierBatch = new OBJREC::GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-
-  if ( verbose )
-    std::cerr << "Multi-class classification test " << std::endl;
-  myClassifierTest ( *classifier, mX, vY_multi );
-
-  // ... we remove nothing here since we are only interested in store and restore :)
-//
-//  myClassifierStoreRestoreTest ( classifier, mX, vY_multi );
-
-  // ... remove previously computed things and start again, this time with incremental settings
-  if (classifier != NULL)
-    delete classifier;
-  if (classifierBatch != NULL)
-    delete classifierBatch;
-
-  classifier = new GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-  classifierBatch = new GPHIKClassifierNICE ( &conf, "GPHIKClassifier" );
-
-  myClassifierILTest( *classifierBatch, *classifier, mX, vY_multi );
-
-  if (classifier != NULL)
-    delete classifier;
-  if (classifierBatch != NULL)
-    delete classifierBatch;
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::testGPHIKClassifier done ===================== " << std::endl;
-
-}
-
-void TestGPHIKClassifier::testGPHIKVariance()
-{
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::testGPHIKVariance ===================== " << std::endl;
-
-  double noise (0.01);
-
-  Config conf;
-  conf.sD( "GPHIKClassifier", "noise", noise );
-  conf.sD( "GPHIKClassifier", "parameter_lower_bound", 1.0 );
-  conf.sD( "GPHIKClassifier", "parameter_upper_bound", 1.0 );
-  conf.sS( "GPHIKClassifier", "varianceApproximation", "approximate_rough");
-  conf.sB( "GPHIKClassifier", "learn_balanced", true);
-  conf.sB( "GPHIKClassifier", "uncertaintyPredictionForClassification", true);
-
-  GPHIKClassifierNICE classifier ( &conf );
-
-  Config confVarApproxQuant(conf);
-  confVarApproxQuant.sB( "GPHIKClassifier", "use_quantization", true );
-  GPHIKClassifierNICE classifierQuant ( &confVarApproxQuant );
-
-  Config confVarApproxFine1(conf);
-  confVarApproxFine1.sS( "GPHIKClassifier", "varianceApproximation", "approximate_fine");
-  confVarApproxFine1.sI( "GPHIKClassifier", "nrOfEigenvaluesToConsiderForVarApprox", 1);
-
-  GPHIKClassifierNICE classifierVarApproxFine1 ( &confVarApproxFine1 );
-
-  Config confVarApproxFine2(conf);
-  confVarApproxFine2.sS( "GPHIKClassifier", "varianceApproximation", "approximate_fine");
-  confVarApproxFine2.sI( "GPHIKClassifier", "nrOfEigenvaluesToConsiderForVarApprox", 2);
-
-  GPHIKClassifierNICE classifierVarApproxFine2 ( &confVarApproxFine2 );
-
-  Config confExact(conf);
-  confExact.sS( "GPHIKClassifier", "varianceApproximation", "exact");
-
-  GPHIKClassifierNICE classifierVarExact ( &confExact );
-
-  NICE::Matrix mX;
-  NICE::Vector vY;
-  NICE::Vector vY_multi;
-
-  ifstream ifs ("toyExample2.data", ios::in);
-  CPPUNIT_ASSERT ( ifs.good() );
-  ifs >> mX;
-  ifs >> vY;
-  ifs >> vY_multi;
-  ifs.close();
-
-  if (verbose)
-  {
-    std::cerr << "data loaded: mX" << std::endl;
-    std::cerr << mX << std::endl;
-    std::cerr << "vY: " << std::endl;
-    std::cerr << vY << std::endl;
-    std::cerr << "vY_multi: " << std::endl;
-    std::cerr << vY_multi << std::endl;
-  }
-
-  Examples examples;
-
-  for ( uint i = 0 ; i < vY.size() ; i++ )
-    if ( i % 2 == 0 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      examples.push_back ( pair<int, Example> ( vY_multi[i], example ) );
-    }
-
-  FeaturePool fp; // will be ignored
-
-  if ( verbose )
-    std::cerr << "preparation for variance testing done." << std::endl;
-
-  if ( verbose )
-    std::cerr << "learning for variance testing ..." << std::endl;
-  classifier.train ( fp, examples );
-  classifierQuant.train ( fp, examples );
-  classifierVarApproxFine1.train ( fp, examples );
-  classifierVarApproxFine2.train ( fp, examples );
-  classifierVarExact.train ( fp, examples );
-
-  if ( verbose )
-    std::cerr << "testing for variance testing ..." << std::endl;
-
-  for ( uint i = 0 ; i < vY_multi.size() ; i++ )
-    if ( i % 2 == 1 )
-    {
-      Example example;
-      example.svec = new SparseVector;
-      example.svec->setDim(3);
-      example.svec->set ( 0, mX(i,0) );
-      example.svec->set ( 1, mX(i,1) );
-      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
-      ClassificationResult r = classifier.classify ( example );
-      ClassificationResult rQuant = classifierQuant.classify ( example );
-      ClassificationResult rVarApproxFine1 = classifierVarApproxFine1.classify ( example );
-      ClassificationResult rVarApproxFine2 = classifierVarApproxFine2.classify ( example );
-      ClassificationResult rExact = classifierVarExact.classify ( example );
-
-//      if (verbose)
-//      {
-      std::cerr << "approxUnc: " << r.uncertainty << " approxUncQuant: " << rQuant.uncertainty << " approxUncFine1: " << rVarApproxFine1.uncertainty << " approxUncFine2: " << rVarApproxFine2.uncertainty << " exactUnc: " << rExact.uncertainty << std::endl;
-//      }
-
-      CPPUNIT_ASSERT ( r.uncertainty <= (1.0 + noise) ); //using the "standard" HIK, this is the upper bound
-      CPPUNIT_ASSERT ( r.uncertainty > rVarApproxFine1.uncertainty);
-      CPPUNIT_ASSERT ( rQuant.uncertainty > rVarApproxFine1.uncertainty);
-      CPPUNIT_ASSERT ( rVarApproxFine1.uncertainty > rVarApproxFine2.uncertainty);
-      CPPUNIT_ASSERT ( rVarApproxFine2.uncertainty > rExact.uncertainty);
-
-    }
-
-  examples.clean();
-
-
-  if (verboseStartEnd)
-    std::cerr << "================== TestGPHIKClassifier::testGPHIKVariance done ===================== " << std::endl;
-
-}
-
-#endif