Merged FP-version of GPHIK-Classifier into vislearning

Alexander Freytag 12 years ago
parent commit 9a4067f75b

+ 249 - 0
classifier/fpclassifier/GPHIKClassifierNICE.cpp

@@ -0,0 +1,249 @@
+/** 
+* @file GPHIKClassifierNICE.cpp
+* @brief feature pool interface for our GP HIK classifier
+* @author Alexander Freytag
+* @date 02/01/2012
+
+*/
+#include <iostream>
+
+#include "core/basics/numerictools.h"
+#include <core/basics/Timer.h>
+
+#include "GPHIKClassifierNICE.h"
+
+using namespace std;
+using namespace NICE;
+using namespace OBJREC;
+
+
+GPHIKClassifierNICE::GPHIKClassifierNICE( const Config *conf, const string & confSection ) 
+{
+  this->verbose = conf->gB(confSection, "verbose", false);
+  this->useSimpleBalancing = conf->gB(confSection, "use_simple_balancing", false);
+  this->minSamples = conf->gI(confSection, "min_samples", -1);
+  this->performOptimizationAfterIncrement = conf->gB(confSection, "performOptimizationAfterIncrement", true);
+  
+  classifier = new GPHIKClassifier(conf, confSection);
+}
+
+GPHIKClassifierNICE::~GPHIKClassifierNICE()
+{
+  if ( classifier != NULL )
+    delete classifier;
+}
+
+ClassificationResult GPHIKClassifierNICE::classify ( Example & pe )
+{
+  const SparseVector *svec = pe.svec;
+
+  if ( svec == NULL )
+    fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
+  return this->classify( svec );
+}
+
+ClassificationResult GPHIKClassifierNICE::classify ( const NICE::SparseVector * example )
+{
+  NICE::SparseVector scores;
+  int result;
+  
+  double uncertainty;
+ 
+  classifier->classify ( example,  result, scores, uncertainty);
+  
+  if ( scores.size() == 0 ) {
+    fthrow(Exception, "Zero scores, something is likely to be wrong here: svec.size() = " << example->size() );
+  }
+  int classes = scores.getDim();
+  FullVector fvscores(classes);
+  
+  NICE::SparseVector::const_iterator it;
+  for(int c = 0; c < classes; c++)
+  {
+    it = scores.find(c);
+    if ( it == scores.end() )
+      fvscores[c] = -std::numeric_limits<double>::max();
+    else
+      fvscores[c] = it->second;
+  }
+
+  ClassificationResult r ( fvscores.maxElement(), fvscores );
+  r.uncertainty = uncertainty;
+  
+  if (verbose)
+  {
+    std::cerr << " GPHIKClassifierNICE::classify scores" << std::endl;
+    scores.store(std::cerr);
+    std::cerr << " GPHIKClassifierNICE::classify fvscores" << std::endl;
+    fvscores.store(std::cerr);
+  }
+
+  return r;
+}
+
+/** training process */
+void GPHIKClassifierNICE::train ( FeaturePool & fp, Examples & examples )
+{
+  // we completely ignore the feature pool :)
+  //
+  initRand(0);
+  Vector classCounts;
+  int minClass = -1;
+  
+  if (verbose) 
+    std::cerr << "GPHIKClassifierNICE::train" << std::endl;
+
+  if ( useSimpleBalancing)
+  {
+    classCounts.resize( examples.getMaxClassNo()+1 );
+    classCounts.set( 0.0 );
+    for ( uint i = 0 ; i < examples.size() ; i++ )
+      classCounts[ examples[i].first ]++;
+    // we need a probability distribution
+    //classCounts.normalizeL1();
+    // we need the class index of the class with the least non-zero examples
+    for ( uint i = 0 ; i < classCounts.size(); i++ )
+      if ( (classCounts[i] > 0) && ((minClass < 0) || (classCounts[i] < classCounts[minClass])) )
+        minClass = i;
+    if (verbose)
+    {
+      cerr << "Class distribution: " << classCounts << endl;
+      cerr << "Class with the least number of examples: " << minClass << endl;
+    }
+    if(minSamples < 0)
+      minSamples = classCounts[minClass];
+  }
+
+  // (multi-class) label vector
+  Vector y ( examples.size() /* maximum size */ );
+
+  // flat structure of our training data
+  std::vector< SparseVector * > sparseExamples;
+
+  if (verbose)
+    cerr << "Converting (and sampling) feature vectors" << endl;
+  for ( uint i = 0 ; i < examples.size() ; i++ )
+  {
+    const Example & example = examples[i].second;
+    int classno = examples[i].first;
+    
+    // simple weird balancing method
+    if ( useSimpleBalancing ) 
+    {
+      double t = randDouble() * classCounts[classno];
+      if ( t >= minSamples ) continue;
+    }
+
+    y[ sparseExamples.size() ] = classno;
+    if ( example.svec == NULL )
+      fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
+    sparseExamples.push_back( example.svec );    
+  }
+
+  // we only use a subset for training
+  y.resize( sparseExamples.size() );
+  
+  classifier->train(sparseExamples, y);
+}
+
+/** training process */
+void GPHIKClassifierNICE::train ( const std::vector< SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
+{
+  classifier->train(examples, binLabels);
+}
+
+void GPHIKClassifierNICE::clear ()
+{
+  if ( classifier != NULL )
+    delete classifier;
+  classifier = NULL;
+}
+
+FeaturePoolClassifier *GPHIKClassifierNICE::clone () const
+{
+  fthrow(Exception, "GPHIKClassifierNICE: clone() not yet implemented" );
+
+  return NULL;
+}
+
+void GPHIKClassifierNICE::predictUncertainty( Example & pe, NICE::Vector & uncertainties )
+{
+  const SparseVector *svec = pe.svec;  
+  if ( svec == NULL )
+    fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
+  classifier->predictUncertainty(svec, uncertainties);
+}
+   
+void GPHIKClassifierNICE::predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties )
+{  
+  classifier->predictUncertainty(example, uncertainties);
+}
+
+//---------------------------------------------------------------------
+//                           protected methods
+//---------------------------------------------------------------------
+void GPHIKClassifierNICE::restore ( std::istream & is, int format )
+{
+  if (is.good())
+  {
+    classifier->restore(is, format);  
+    
+    std::string tmp;
+    is >> tmp; //"performOptimizationAfterIncrement: "
+    is >> this->performOptimizationAfterIncrement;
+  }
+  else
+  {
+    std::cerr << "GPHIKClassifierNICE::restore -- InStream not initialized - restoring not possible!" << std::endl;
+  }
+}
+
+void GPHIKClassifierNICE::store ( std::ostream & os, int format ) const
+{
+  if (os.good())
+  {
+    os.precision (numeric_limits<double>::digits10 + 1);
+    
+    classifier->store(os, format);
+    
+    os << "performOptimizationAfterIncrement: " << performOptimizationAfterIncrement << std::endl;
+  }
+  else
+  {
+    std::cerr << "OutStream not initialized - storing not possible!" << std::endl;
+  }
+}
+
+void GPHIKClassifierNICE::addExample( const Example & pe, const double & label)
+{
+  const SparseVector *svec = pe.svec;
+  if ( svec == NULL )
+    fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
+  classifier->addExample(svec, label, this->performOptimizationAfterIncrement);
+}
+
+void GPHIKClassifierNICE::addMultipleExamples( Examples & newExamples)
+{
+  //are new examples available? If not, nothing has to be done
+  if ( newExamples.size() < 1)
+    return;
+  
+  // (multi-class) label vector
+  Vector y ( newExamples.size() );
+
+  // flat structure of our training data
+  std::vector< const SparseVector * > sparseExamples;
+
+  if (verbose)
+    cerr << "Converting (and sampling) feature vectors" << endl;
+  for ( uint i = 0 ; i < newExamples.size() ; i++ )
+  {
+    const Example & example = newExamples[i].second;
+    int classno = newExamples[i].first;
+
+    y[ i ] = classno;
+    if ( example.svec == NULL )
+      fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
+    sparseExamples.push_back( example.svec );    
+  }  
+  
+  classifier->addMultipleExamples(sparseExamples, y, this->performOptimizationAfterIncrement);  
+}
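
For orientation, a minimal usage sketch of the wrapper added above. This is an illustration under assumptions, not part of the commit: the feature values and the noise setting are made up, and the OBJREC types Example, Examples, FeaturePool, and ClassificationResult are assumed to come in through GPHIKClassifierNICE.h (via FeaturePoolClassifier.h), as in the tests below.

#include <iostream>

#include <core/basics/Config.h>

#include "GPHIKClassifierNICE.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;

int main()
{
  Config conf;
  conf.sD( "GPHIKClassifier", "noise", 0.01 ); // hypothetical setting

  GPHIKClassifierNICE classifier ( &conf );

  // a tiny two-class training set of 3-dim sparse histogram features
  Examples examples;
  for ( int i = 0; i < 4; i++ )
  {
    Example example;
    example.svec = new SparseVector;
    example.svec->setDim(3);
    example.svec->set ( 0, 0.1 * i );
    example.svec->set ( 1, 0.2 );
    example.svec->set ( 2, 0.8 - 0.1 * i );
    examples.push_back ( pair<int, Example> ( i % 2, example ) );
  }

  FeaturePool fp; // ignored by train(), see above
  classifier.train ( fp, examples );

  // classify a query; classify() throws if query.svec == NULL
  Example query;
  query.svec = new SparseVector;
  query.svec->setDim(3);
  query.svec->set ( 0, 0.2 );
  query.svec->set ( 1, 0.3 );
  query.svec->set ( 2, 0.5 );

  // classno is the argmax over the class scores (see classify() above)
  ClassificationResult r = classifier.classify ( query );
  std::cerr << "class: " << r.classno << ", uncertainty: " << r.uncertainty << std::endl;

  delete query.svec;
  examples.clean();
  return 0;
}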

+ 99 - 0
classifier/fpclassifier/GPHIKClassifierNICE.h

@@ -0,0 +1,99 @@
+/** 
+* @file GPHIKClassifierNICE.h
+* @author Alexander Freytag, Erik Rodner
+* @date 02/01/2012
+
+*/
+#ifndef _NICE_GPHIKCLASSIFIERNICEINCLUDE
+#define _NICE_GPHIKCLASSIFIERNICEINCLUDE
+
+#include <string>
+#include "core/basics/Config.h"
+#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
+
+#include <gp-hik-core/GPHIKClassifier.h>
+#include <gp-hik-core/FMKGPHyperparameterOptimization.h>
+#include <gp-hik-core/parameterizedFunctions/ParameterizedFunction.h>
+
+namespace OBJREC {
+  
+/** @class GPHIKClassifierNICE
+ * Wrapper class (feature pool interface) for our GP HIK classifier 
+ *
+ * @author Alexander Freytag, Erik Rodner
+ */
+class GPHIKClassifierNICE : public FeaturePoolClassifier
+{
+
+  protected:
+    
+    NICE::GPHIKClassifier * classifier;
+    
+    /** verbose flag for useful output*/
+    bool verbose;
+    
+    /** a simple balancing strategy: use at most as many examples per class as the smallest class provides */
+    bool useSimpleBalancing; 
+    int minSamples;
+    
+    /** When adding new examples, do we want to rerun a full optimization of all involved hyperparameters? (default: true) */
+    bool performOptimizationAfterIncrement;
+
+  public:
+
+    /** simple constructor */
+    GPHIKClassifierNICE( const NICE::Config *conf, const std::string & confSection = "GPHIKClassifier" );
+      
+    /** simple destructor */
+    virtual ~GPHIKClassifierNICE();
+   
+    /** 
+    * @brief classify a given example with the previously learnt model
+    * @param pe example to be classified given in a sparse representation
+    */
+    virtual ClassificationResult classify ( OBJREC::Example & pe );
+    /** 
+     * @brief classify a given example with the previously learnt model
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param example example to be classified, given in a sparse representation
+     */    
+    ClassificationResult classify ( const NICE::SparseVector * example );
+
+    /** training process */
+    virtual void train ( OBJREC::FeaturePool & fp, OBJREC::Examples & examples );
+    /** 
+     * @brief train this classifier using a given set of examples and a given set of binary label vectors 
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param examples examples to use given in a sparse data structure
+     * @param binLabels corresponding binary label vectors, keyed by class number. There is no need for every example to have exactly one positive entry across this set (labels in {1,-1})
+     */
+    void train ( const std::vector< NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
+    
+    /** Persistent interface */
+    virtual void restore ( std::istream & is, int format = 0 );
+    virtual void store ( std::ostream & os, int format = 0 ) const;
+    virtual void clear ();
+
+    virtual FeaturePoolClassifier *clone () const;
+    
+    /** prediction of classification uncertainty */
+    void predictUncertainty( OBJREC::Example & pe, NICE::Vector & uncertainties );
+    /** 
+     * @brief prediction of classification uncertainty
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param example example for which the classification uncertainty shall be predicted, given in a sparse representation
+     * @param uncertainties contains the resulting classification uncertainties (1 entry for standard setting, m entries for binary-balanced setting)
+     */       
+    void predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties );
+    
+    void addExample( const OBJREC::Example & pe, const double & label);
+    virtual void addMultipleExamples( OBJREC::Examples & newExamples);
+    
+};
+
+}
+
+#endif
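
Regarding the second train() overload declared above: binLabels holds one binary (+1/-1) label vector per class. Below is a minimal sketch of how such a map could be assembled from integer labels; buildBinLabels is a hypothetical helper, not part of this commit, and the VectorT.h include path is an assumption following the core/ convention used above.

#include <map>
#include <vector>

#include <core/vector/VectorT.h>

// hypothetical helper: one +1/-1 label vector per class, keyed by class number
std::map<int, NICE::Vector> buildBinLabels ( const std::vector<int> & classnos, int numClasses )
{
  std::map<int, NICE::Vector> binLabels;
  for ( int c = 0; c < numClasses; c++ )
  {
    NICE::Vector yBin ( classnos.size() );
    for ( size_t i = 0; i < classnos.size(); i++ )
      yBin[i] = ( classnos[i] == c ) ? 1.0 : -1.0;
    binLabels.insert ( std::pair<int, NICE::Vector> ( c, yBin ) );
  }
  return binLabels;
}

// usage, with sparseExamples collected as in train() above:
//   std::map<int, NICE::Vector> binLabels = buildBinLabels ( classnos, 2 );
//   classifier.train ( sparseExamples, binLabels );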

+ 3 - 1
classifier/fpclassifier/libdepend.inc

@@ -1 +1,3 @@
-$(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)
+$(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)
+$(call PKG_DEPEND_INT,gp-hik-core/)
+

+ 89 - 0
classifier/fpclassifier/tests/Makefile.inc

@@ -0,0 +1,89 @@
+# BINARY-DIRECTORY-MAKEFILE
+# conventions:
+# - there are no subdirectories, they are ignored!
+# - all ".C", ".cpp" and ".c" files in the current directory are considered
+#   independent binaries, and linked as such.
+# - the binaries depend on the library of the parent directory
+# - the binary names are created with $(BINNAME), i.e. it will be more or less
+#   the name of the .o file
+# - all binaries will be added to the default build list ALL_BINARIES
+
+# --------------------------------
+# - remember the last subdirectory
+#
+# set the variable $(SUBDIR) correctly to the current subdirectory. this
+# variable can be used throughout the current makefile.inc. The many 
+# SUBDIR_before, _add, and everything are only required so that we can recover
+# the previous content of SUBDIR before exiting the makefile.inc
+
+SUBDIR_add:=$(dir $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
+SUBDIR_before:=$(SUBDIR)
+SUBDIR:=$(strip $(SUBDIR_add))
+SUBDIR_before_$(SUBDIR):=$(SUBDIR_before)
+
+# ------------------------
+# - include subdirectories
+#
+# note the variables $(SUBDIRS_OF_$(SUBDIR)) are required later on to recover
+# the dependencies automatically. if you handle dependencies on your own, you
+# can also dump the $(SUBDIRS_OF_$(SUBDIR)) variable, and include the
+# makefile.inc of the subdirectories on your own...
+
+#SUBDIRS_OF_$(SUBDIR):=$(patsubst %/Makefile.inc,%,$(wildcard $(SUBDIR)*/Makefile.inc))
+#include $(SUBDIRS_OF_$(SUBDIR):%=%/Makefile.inc)
+
+# ----------------------------
+# - include local dependencies
+#
+# include the libdepend.inc file, which gives additional dependencies for the
+# libraries and binaries. additionally, an automatic dependency from the library
+# of the parent directory is added (commented out in the code below).
+
+-include $(SUBDIR)libdepend.inc
+
+PARENTDIR:=$(patsubst %/,%,$(dir $(patsubst %/,%,$(SUBDIR))))
+$(call PKG_DEPEND_INT,$(PARENTDIR))
+$(call PKG_DEPEND_EXT,CPPUNIT)
+
+# ---------------------------
+# - objects in this directory
+#
+# the use of the variable $(OBJS) is not mandatory. it is mandatory however
+# to update $(ALL_OBJS) in a way that it contains the path and name of
+# all objects. otherwise we can not include the appropriate .d files.
+
+OBJS:=$(patsubst %.cpp,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.cpp))) \
+      $(patsubst %.C,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.C))) \
+      $(shell grep -ls Q_OBJECT $(SUBDIR)*.h | sed -e's@^@/@;s@.*/@$(OBJDIR)moc_@;s@\.h$$@.o@') \
+      $(patsubst %.c,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.c)))
+ALL_OBJS += $(OBJS)
+
+# ----------------------------
+# - binaries in this directory
+#
+# output of binaries in this directory. none of the variables has to be used.
+# but everything you add to $(ALL_LIBRARIES) and $(ALL_BINARIES) will be
+# compiled with `make all`. be sure again to add the files with full path.
+
+CHECKS:=$(BINDIR)$(call LIBNAME,$(SUBDIR))
+ALL_CHECKS+=$(CHECKS)
+
+# ---------------------
+# - binary dependencies
+#
+# there is no way of determining the binary dependencies automatically, so we
+# follow conventions. each binary depends on the corresponding .o file and
+# on the libraries specified by the INTLIBS/EXTLIBS. these dependencies can be
+# specified manually or they are automatically stored in a .bd file.
+
+$(foreach head,$(wildcard $(SUBDIR)*.h),$(eval $(shell grep -q Q_OBJECT $(head) && echo $(head) | sed -e's@^@/@;s@.*/\(.*\)\.h$$@$(BINDIR)\1:$(OBJDIR)moc_\1.o@')))
+$(eval $(foreach c,$(CHECKS),$(c):$(BUILDDIR)$(CPPUNIT_MAIN_OBJ) $(OBJS) $(call PRINT_INTLIB_DEPS,$(c),.a)))
+
+# -------------------
+# - subdir management
+#
+# as the last step, always add this line to correctly recover the subdirectory
+# of the makefile including this one!
+
+SUBDIR:=$(SUBDIR_before_$(SUBDIR))
+

+ 536 - 0
classifier/fpclassifier/tests/TestGPHIKClassifier.cpp

@@ -0,0 +1,536 @@
+#ifdef NICE_USELIB_CPPUNIT
+
+#include <string>
+#include <exception>
+#include <iostream>
+#include <fstream>
+
+//----------
+
+#include <core/basics/Timer.h>
+
+//----------
+
+#include <vislearning/cbaselib/ClassificationResults.h>
+#include <vislearning/classifier/kernelclassifier/KCGPRegOneVsAll.h>
+
+//----------
+
+#include "gp-hik-exp/GPHIKClassifierNICE.h"
+
+//----------
+
+#include "TestGPHIKClassifier.h"
+
+
+const bool verbose = false;
+const bool verboseStartEnd = true;
+
+using namespace OBJREC;
+using namespace NICE;
+using namespace std;
+
+CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKClassifier );
+
+void TestGPHIKClassifier::setUp() {
+}
+
+void TestGPHIKClassifier::tearDown() {
+}
+
+void myClassifierTest( GPHIKClassifierNICE & classifier, const Matrix & mX, const Vector & vY )
+{
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierTest ===================== " << std::endl;
+  
+  Examples examples;
+
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 1 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      examples.push_back ( pair<int, Example> ( vY[i], example ) );
+    }
+
+  FeaturePool fp; // will be ignored
+
+  if ( verbose )
+    std::cerr << "preparation done." << std::endl;
+
+  if ( verbose ) 
+    std::cerr << "learning ..." << std::endl;
+  classifier.train ( fp, examples ); 
+
+  if ( verbose )
+    std::cerr << "testing ..." << std::endl;
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 0 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      ClassificationResult r = classifier.classify ( example );
+      if (verbose)
+      {
+        r.scores >> std::cerr;
+        std::cerr << "predicted uncertainty: " << r.uncertainty << std::endl;
+      }
+    } 
+
+  examples.clean();
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierTest done ===================== " << std::endl;
+
+}
+
+void myClassifierStoreRestoreTest( GPHIKClassifierNICE & classifier, const Matrix & mX, const Vector & vY )
+{
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierStoreRestoreTest ===================== " << std::endl;
+  
+  Examples examples;
+
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 1 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      examples.push_back ( pair<int, Example> ( vY[i], example ) );
+    }
+
+  FeaturePool fp; // will be ignored
+
+  if ( verbose )
+    std::cerr << "preparation done." << std::endl;
+
+  if ( verbose ) 
+    std::cerr << "learning ..." << std::endl;
+  classifier.train ( fp, examples ); 
+  
+  if ( verbose ) 
+    std::cerr << "storing ..." << std::endl;  
+  //test the store-functionality  
+  string destination("/tmp/GPHIK_store.txt");
+  
+  std::filebuf fb;
+  fb.open (destination.c_str(),ios::out);
+  std::ostream os(&fb);
+//   
+  classifier.store(os);  
+//   
+  fb.close();
+  
+  if ( verbose ) 
+    std::cerr << "loading ..." << std::endl;  
+  
+  Config confTmp;
+  GPHIKClassifierNICE classifierRestored(&confTmp);
+  
+  std::filebuf fbIn;
+  fbIn.open (destination.c_str(),ios::in);
+  std::istream is(&fbIn);
+//   
+  classifierRestored.restore(is);
+//   
+  fbIn.close();    
+
+  if ( verbose )
+    std::cerr << "testing ..." << std::endl;
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 0 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      ClassificationResult rOrig = classifier.classify ( example );
+      ClassificationResult rRestored = classifierRestored.classify ( example );
+      
+      //scores are of type FullVector
+      //we use the [] operator, since there are no iterators given in FullVector.h
+      bool equal(true);
+      for (int i = 0; i< rOrig.scores.size(); i++)
+      {
+        if ( fabs(rOrig.scores[i] - rRestored.scores[i]) > 1e-6)
+        {
+          equal = false;
+          break;
+        }        
+      }
+      
+      CPPUNIT_ASSERT_EQUAL ( equal, true ); 
+    } 
+
+  examples.clean();
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierStoreRestoreTest done ===================== " << std::endl;
+
+}
+
+void myClassifierILTest( GPHIKClassifierNICE & classifierRetrain, GPHIKClassifierNICE & classifierIL, const Matrix & mX, const Vector & vY )
+{
+ 
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierILTest ===================== " << std::endl;
+  
+  Examples examples;
+  
+  if (verbose)
+    std::cerr << "vY: " << vY << std::endl;
+
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+  {
+    if ( i % 4 == 1 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      examples.push_back ( pair<int, Example> ( vY[i], example ) );
+    }
+  }
+
+  if (verbose)
+    std::cerr << "examples.size(): " << examples.size()  << std::endl;
+
+  FeaturePool fp; // will be ignored
+
+  if ( verbose )
+    std::cerr << "preparation done." << std::endl;
+
+  if ( verbose ) 
+    std::cerr << "learning ..." << std::endl;
+  classifierIL.train ( fp, examples ); 
+  
+  //choose next example(s)
+  
+  Examples newExamples;
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+  {
+    if ( i % 4 == 3 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      newExamples.push_back ( pair<int, Example> ( vY[i], example ) );
+    }  
+  }
+
+//   if ( verbose ) 
+    std::cerr << std::endl << " =============== " << std::endl << "incremental learning ..." << std::endl;
+  
+  // add them to classifierIL
+//   std::cerr << "We add several new examples" << std::endl;
+  Timer t;
+  t.start();  
+//   for (uint i = 0; i < newExamples.size(); i++)
+  for (uint i = 0; i < 1; i++)
+  {
+    classifierIL.addExample( newExamples[i].second, newExamples[i].first);      
+  }  
+  
+  t.stop();  
+  std::cerr << "Time used for incremental training: " << t.getLast() << std::endl;
+
+  //add the new features to feature pool needed for batch training
+//   for (uint i = 0; i < newExamples.size(); i++)
+  for (uint i = 0; i < 1; i++)
+  {  
+    examples.push_back( newExamples[i] );
+  }
+  
+  std::cerr << std::endl << " =============== " << std::endl << "We train the second classifier from scratch with the additional new example" << std::endl;
+  t.start(); 
+  
+  classifierRetrain.train ( fp, examples );  
+  
+  t.stop();  
+  std::cerr << "Time used for batch training: " << t.getLast() << std::endl;  
+  
+  //evaluate both and compare the resulting scores
+//  if ( verbose )
+    std::cerr << "testing ..." << std::endl;
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 0 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      ClassificationResult resultIL = classifierIL.classify ( example );
+      ClassificationResult resultBatch = classifierRetrain.classify ( example );
+      
+      if (verbose)
+      {
+        std::cerr << "result of IL classifier: " << std::endl;
+        resultIL.scores >> std::cerr;
+        
+        std::cerr << "result of batch classifier: " << std::endl;
+        resultBatch.scores >> std::cerr;
+      }
+      
+      //scores are of type FullVector
+      //we use the [] operator, since there are no iterators given in FullVector.h
+      bool equal(true);
+      for (int i = 0; i< resultIL.scores.size(); i++)
+      {
+        if ( fabs(resultIL.scores[i] - resultBatch.scores[i]) > 10e-3)
+        {
+          equal = false;
+          break;
+        }        
+      }
+      
+      CPPUNIT_ASSERT_EQUAL ( equal, true ); 
+    } 
+
+  examples.clean();
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::myClassifierILTest done ===================== " << std::endl;
+}
+
+void TestGPHIKClassifier::testGPHIKClassifier() 
+{
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::testGPHIKClassifier ===================== " << std::endl;
+
+  Config conf;
+  conf.sD( "GPHIKClassifier", "noise", 0.01 );
+  conf.sD( "GPHIKClassifier", "parameter_lower_bound", 0.5 );
+  conf.sD( "GPHIKClassifier", "parameter_upper_bound", 3.5 );
+  conf.sI( "GPHIKClassifier", "uncertaintyPrediction", 1);
+//   conf.sS( "GPHIKClassifier", "optimization_method", "none");
+  conf.sS( "GPHIKClassifier", "optimization_method", "downhillsimplex");
+  conf.sB( "GPHIKClassifier", "uncertaintyPredictionForClassification", true);
+
+  GPHIKClassifierNICE * classifier  = new GPHIKClassifierNICE ( &conf );
+  
+  Matrix mX;
+  Vector vY;
+  Vector vY_multi;
+
+//   ifstream ifs ("toyExample1.data", ios::in);
+//   ifstream ifs ("toyExampleLargeScale.data", ios::in);
+  ifstream ifs ("toyExampleLargeLargeScale.data", ios::in);
+  CPPUNIT_ASSERT ( ifs.good() );
+  ifs >> mX;
+  ifs >> vY;
+  ifs >> vY_multi;
+  ifs.close();
+  
+  if (verbose)
+  {
+    std::cerr << "data loaded: mX" << std::endl;
+    std::cerr << mX << std::endl;
+    std::cerr << "vY: " << std::endl;
+    std::cerr << vY << std::endl;
+    std::cerr << "vY_multi: " << std::endl;
+    std::cerr << vY_multi << std::endl;
+  }
+
+  if ( verbose )
+    std::cerr << "Binary classification test " << std::endl; 
+
+  myClassifierTest ( *classifier, mX, vY );
+  
+  // ... we remove nothing here since we are only interested in store and restore :)
+  myClassifierStoreRestoreTest ( *classifier, mX, vY );
+  
+  // ... remove previously computed things and start again, this time with incremental settings
+  if (classifier != NULL)
+    delete classifier;
+  
+  classifier  = new GPHIKClassifierNICE ( &conf );
+  GPHIKClassifierNICE * classifierBatch = new GPHIKClassifierNICE ( &conf ); 
+  
+  myClassifierILTest( *classifierBatch, *classifier, mX, vY );
+  
+  if (classifier != NULL)
+    delete classifier;
+  if (classifierBatch != NULL)
+    delete classifierBatch;
+  
+  classifier  = new GPHIKClassifierNICE ( &conf );
+  classifierBatch = new GPHIKClassifierNICE ( &conf );  
+
+  if ( verbose )
+    std::cerr << "Multi-class classification test " << std::endl; 
+  myClassifierTest ( *classifier, mX, vY_multi );
+  
+  // ... we remove nothing here since we are only interested in store and restore :)
+//   
+//   myClassifierStoreRestoreTest ( classifier, mX, vY_multi );
+  
+  // ... remove previously computed things and start again, this time with incremental settings
+  if (classifier != NULL)
+    delete classifier;
+  if (classifierBatch != NULL)
+    delete classifierBatch;
+  
+  classifier  = new GPHIKClassifierNICE ( &conf );
+  classifierBatch = new GPHIKClassifierNICE ( &conf ); 
+  
+  myClassifierILTest( *classifierBatch, *classifier, mX, vY_multi );
+  
+  if (classifier != NULL)
+    delete classifier;
+  if (classifierBatch != NULL)
+    delete classifierBatch;  
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::testGPHIKClassifier done ===================== " << std::endl;
+ 
+}
+
+void TestGPHIKClassifier::testGPHIKVariance()
+{
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::testGPHIKVariance ===================== " << std::endl;
+
+  double noise (0.01);
+  
+  Config conf;
+  conf.sD( "GPHIKClassifier", "noise", noise );
+  conf.sD( "GPHIKClassifier", "parameter_lower_bound", 1.0 );
+  conf.sD( "GPHIKClassifier", "parameter_upper_bound", 1.0 );
+  conf.sS( "GPHIKClassifier", "varianceApproximation", "approximate_rough");
+  conf.sB( "GPHIKClassifier", "learn_balanced", true);
+  conf.sB( "GPHIKClassifier", "uncertaintyPredictionForClassification", true);
+  
+  GPHIKClassifierNICE classifier ( &conf );
+  
+  Config confVarApproxQuant(conf);
+  confVarApproxQuant.sB( "GPHIKClassifier", "use_quantization", true );
+  GPHIKClassifierNICE classifierQuant ( &confVarApproxQuant );  
+
+  Config confVarApproxFine1(conf);
+  confVarApproxFine1.sS( "GPHIKClassifier", "varianceApproximation", "approximate_fine");  
+  confVarApproxFine1.sI( "GPHIKClassifier", "nrOfEigenvaluesToConsiderForVarApprox", 1);
+  
+  GPHIKClassifierNICE classifierVarApproxFine1 ( &confVarApproxFine1 );  
+
+  Config confVarApproxFine2(conf);
+  confVarApproxFine2.sS( "GPHIKClassifier", "varianceApproximation", "approximate_fine");    
+  confVarApproxFine2.sI( "GPHIKClassifier", "nrOfEigenvaluesToConsiderForVarApprox", 2);
+  
+  GPHIKClassifierNICE classifierVarApproxFine2 ( &confVarApproxFine2 );    
+  
+  Config confExact(conf);
+  confExact.sS( "GPHIKClassifier", "varianceApproximation", "exact");   
+  
+  GPHIKClassifierNICE classifierVarExact ( &confExact );
+  
+  NICE::Matrix mX;
+  NICE::Vector vY;
+  NICE::Vector vY_multi;
+
+  ifstream ifs ("toyExample2.data", ios::in);
+  CPPUNIT_ASSERT ( ifs.good() );
+  ifs >> mX;
+  ifs >> vY;
+  ifs >> vY_multi;
+  ifs.close();
+
+  if (verbose)
+  {
+    std::cerr << "data loaded: mX" << std::endl;
+    std::cerr << mX << std::endl;
+    std::cerr << "vY: " << std::endl;
+    std::cerr << vY << std::endl;
+    std::cerr << "vY_multi: " << std::endl;
+    std::cerr << vY_multi << std::endl;
+  }
+  
+  Examples examples;
+
+  for ( uint i = 0 ; i < vY.size() ; i++ )
+    if ( i % 2 == 0 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      examples.push_back ( pair<int, Example> ( vY_multi[i], example ) );
+    }
+
+  FeaturePool fp; // will be ignored
+
+  if ( verbose )
+    std::cerr << "preparation for variance testing done." << std::endl;
+
+  if ( verbose ) 
+    std::cerr << "learning for variance testing ..." << std::endl;
+  classifier.train ( fp, examples ); 
+  classifierQuant.train ( fp, examples );
+  classifierVarApproxFine1.train ( fp, examples ); 
+  classifierVarApproxFine2.train ( fp, examples ); 
+  classifierVarExact.train ( fp, examples ); 
+
+  if ( verbose )
+    std::cerr << "testing for variance testing ..." << std::endl;
+  
+  for ( uint i = 0 ; i < vY_multi.size() ; i++ )
+    if ( i % 2 == 1 )
+    {
+      Example example;
+      example.svec = new SparseVector;
+      example.svec->setDim(3);
+      example.svec->set ( 0, mX(i,0) );
+      example.svec->set ( 1, mX(i,1) );
+      example.svec->set ( 2, 1.0-mX(i,0)-mX(i,1) );
+      ClassificationResult r = classifier.classify ( example );
+      ClassificationResult rQuant = classifierQuant.classify ( example );
+      ClassificationResult rVarApproxFine1 = classifierVarApproxFine1.classify ( example );
+      ClassificationResult rVarApproxFine2 = classifierVarApproxFine2.classify ( example );
+      ClassificationResult rExact = classifierVarExact.classify ( example );
+      
+      if (verbose)
+      {
+        std::cerr << "approxUnc: " << r.uncertainty << " approxUncQuant: " << rQuant.uncertainty<< " approxUncFine1: " << rVarApproxFine1.uncertainty << " approxUncFine2: " << rVarApproxFine2.uncertainty << " exactUnc: " << rExact.uncertainty << std::endl;
+      }
+
+      CPPUNIT_ASSERT ( r.uncertainty <=  (1.0 + noise) ); //using the "standard" HIK, this is the upper bound
+      CPPUNIT_ASSERT ( r.uncertainty >  rVarApproxFine1.uncertainty);
+      CPPUNIT_ASSERT ( rQuant.uncertainty >  rVarApproxFine1.uncertainty);
+      CPPUNIT_ASSERT ( rVarApproxFine1.uncertainty >  rVarApproxFine2.uncertainty);
+      CPPUNIT_ASSERT ( rVarApproxFine2.uncertainty >  rExact.uncertainty);
+      
+    } 
+
+  examples.clean();  
+  
+  
+  if (verboseStartEnd)
+    std::cerr << "================== TestGPHIKClassifier::testGPHIKVariance done ===================== " << std::endl;
+  
+}
+
+#endif

+ 31 - 0
classifier/fpclassifier/tests/TestGPHIKClassifier.h

@@ -0,0 +1,31 @@
+#ifndef _TESTGPHIKCLASSIFIER_H
+#define _TESTGPHIKCLASSIFIER_H
+
+#include <cppunit/extensions/HelperMacros.h>
+
+/**
+ * CppUnit-Testcase. 
+ */
+class TestGPHIKClassifier : public CppUnit::TestFixture {
+
+    CPPUNIT_TEST_SUITE( TestGPHIKClassifier );
+    
+    CPPUNIT_TEST(testGPHIKClassifier);
+    CPPUNIT_TEST(testGPHIKVariance);
+//     CPPUNIT_TEST(testGPHIKIncrementalLearning);
+    
+    CPPUNIT_TEST_SUITE_END();
+  
+ private:
+ 
+ public:
+    void setUp();
+    void tearDown();
+
+    void testGPHIKClassifier();
+    void testGPHIKVariance();
+//     void testGPHIKIncrementalLearning();
+
+};
+
+#endif // _TESTGPHIKCLASSIFIER_H

BIN
classifier/fpclassifier/tests/sparse20x30matrixM.mat


BIN
classifier/fpclassifier/tests/sparse3x3matrixA.mat


+ 42 - 0
classifier/fpclassifier/tests/toyExample1.data

@@ -0,0 +1,42 @@
+39 x 2
+0.1394    0.3699
+0.1210    0.3260
+0.1164    0.2588
+0.1210    0.2032
+0.1417    0.1886
+0.1624    0.2325
+0.1624    0.3319
+0.1509    0.3114
+0.1417    0.2412
+0.1417    0.2763
+0.1279    0.3173
+0.3537    0.3582
+0.3306    0.3056
+0.3306    0.2471
+0.3376    0.2061
+0.3583    0.1740
+0.3698    0.1564
+0.3790    0.2558
+0.3744    0.3173
+0.3698    0.3406
+0.3583    0.2646
+0.3629    0.1944
+0.3468    0.3173
+0.3329    0.2588
+0.3514    0.1974
+0.2224    0.3436
+0.2270    0.3348
+0.2293    0.2675
+0.2339    0.2237
+0.2316    0.1623
+0.2408    0.1857
+0.2615    0.2763
+0.2638    0.3436
+0.2592    0.3904
+0.2477    0.4284
+0.2224    0.3582
+0.2177    0.2909
+0.2224    0.2178
+0.2500    0.1213
+39 < 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 >
+39 < 0 0 0 0 0 0 0 0 0 0 0 3 3 3 3 3 3 3 3 3 3 3 3 3 3 1 1 1 1 1 1 1 1 1 1 1 1 1 1 >

+ 9 - 0
classifier/fpclassifier/tests/toyExample2.data

@@ -0,0 +1,9 @@
+6 x 2
+0.1    0.3
+0.1    0.2
+0.3    0.3
+0.2    0.2
+0.4    0.1
+0.1    0.5
+6 < 0 0 0 1 1 1 >
+6 < 0 0 3 3 1 1 >
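
For reference, the toy data files added here use the plain NICE stream format that the tests read directly: a "rows x cols" feature matrix, followed by a binary and a multi-class label vector in "n < ... >" notation. The loading pattern, annotated, is the same as in the test code above:

NICE::Matrix mX;
NICE::Vector vY;
NICE::Vector vY_multi;

std::ifstream ifs ( "toyExample2.data", std::ios::in );
ifs >> mX;       // 6 x 2 feature matrix
ifs >> vY;       // binary labels:      6 < 0 0 0 1 1 1 >
ifs >> vY_multi; // multi-class labels: 6 < 0 0 3 3 1 1 >
ifs.close();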

File diff suppressed because it is too large
+ 1502 - 0
classifier/fpclassifier/tests/toyExampleLargeLargeScale.data


+ 604 - 0
classifier/fpclassifier/tests/toyExampleLargeScale.data

@@ -0,0 +1,604 @@
+600 x 2
+0.342689 0.175671 
+0.30934 0.268245 
+0.283338 0.31431 
+0.322194 0.211048 
+0.27985 0.217818 
+0.253954 0.195404 
+0.435345 0.125535 
+0.312486 0.137852 
+0.268998 0.2357 
+0.254516 0.213503 
+0.315574 0.130796 
+0.39208 0.178418 
+0.262966 0.128216 
+0.258793 0.151603 
+0.32426 0.234761 
+0.292135 0.138279 
+0.331166 0.145197 
+0.3395 0.169078 
+0.283081 0.134257 
+0.2829 0.147042 
+0.257692 0.28673 
+0.31662 0.209292 
+0.274172 0.194507 
+0.368124 0.127353 
+0.378173 0.236568 
+0.313633 0.166162 
+0.308659 0.154215 
+0.307818 0.180172 
+0.306022 0.194493 
+0.256317 0.182305 
+0.253279 0.138247 
+0.286522 0.154934 
+0.343294 0.296202 
+0.435882 0.149799 
+0.266064 0.141986 
+0.362818 0.130809 
+0.424555 0.154075 
+0.312223 0.176338 
+0.346151 0.185167 
+0.303702 0.224405 
+0.250913 0.27087 
+0.278182 0.187886 
+0.305441 0.161418 
+0.390785 0.176948 
+0.419366 0.139879 
+0.298091 0.134676 
+0.311699 0.242829 
+0.293336 0.238481 
+0.349461 0.179128 
+0.294253 0.191256 
+0.255692 0.154904 
+0.273268 0.193811 
+0.376241 0.206254 
+0.329721 0.17215 
+0.331964 0.234789 
+0.335461 0.186645 
+0.296782 0.158248 
+0.368493 0.148484 
+0.255566 0.188169 
+0.343617 0.135276 
+0.252996 0.204645 
+0.285394 0.333677 
+0.313484 0.175742 
+0.250342 0.203408 
+0.266606 0.188748 
+0.283449 0.129172 
+0.340621 0.179734 
+0.315654 0.199744 
+0.27226 0.134784 
+0.296711 0.185527 
+0.253752 0.198492 
+0.257381 0.257684 
+0.346152 0.220506 
+0.36263 0.183317 
+0.278849 0.181596 
+0.301625 0.247397 
+0.318059 0.23282 
+0.271193 0.143659 
+0.40265 0.205326 
+0.457977 0.223787 
+0.277921 0.132572 
+0.44805 0.266026 
+0.292541 0.133553 
+0.320695 0.152126 
+0.293894 0.132603 
+0.310329 0.158675 
+0.308961 0.228526 
+0.310193 0.201196 
+0.357398 0.276934 
+0.362411 0.134546 
+0.252874 0.249074 
+0.323796 0.231816 
+0.258442 0.173894 
+0.343986 0.134667 
+0.356016 0.163639 
+0.322109 0.210639 
+0.28522 0.223836 
+0.396437 0.198424 
+0.283134 0.21192 
+0.279188 0.215173 
+0.260586 0.22736 
+0.329615 0.19164 
+0.339912 0.133774 
+0.257242 0.151432 
+0.353614 0.163562 
+0.332978 0.182046 
+0.302671 0.248665 
+0.259309 0.151224 
+0.318917 0.240108 
+0.344637 0.135684 
+0.256466 0.283143 
+0.356169 0.209122 
+0.251218 0.224075 
+0.424779 0.215246 
+0.372904 0.150395 
+0.428672 0.125709 
+0.391982 0.182144 
+0.26703 0.265749 
+0.266772 0.152864 
+0.418837 0.250821 
+0.303323 0.235758 
+0.311233 0.15944 
+0.390081 0.292144 
+0.289179 0.154131 
+0.269899 0.233753 
+0.292143 0.269953 
+0.389615 0.181187 
+0.281855 0.168289 
+0.355694 0.130023 
+0.258038 0.191685 
+0.322198 0.160255 
+0.265639 0.205397 
+0.266359 0.195618 
+0.291999 0.161498 
+0.287761 0.170072 
+0.264713 0.332262 
+0.294721 0.140154 
+0.273594 0.165844 
+0.310086 0.169887 
+0.341029 0.225881 
+0.316856 0.137035 
+0.300842 0.221668 
+0.301447 0.210899 
+0.292541 0.135141 
+0.282796 0.135598 
+0.267783 0.151061 
+0.461684 0.192769 
+0.311754 0.238481 
+0.252301 0.171746 
+0.370648 0.194599 
+0.363942 0.159229 
+0.353153 0.187895 
+0.343755 0.214295 
+0.35249 0.132681 
+0.321514 0.191171 
+0.32338 0.135597 
+0.365625 0.141555 
+0.33572 0.236221 
+0.255242 0.240287 
+0.272454 0.25177 
+0.260317 0.137604 
+0.293878 0.138076 
+0.262748 0.191504 
+0.329031 0.143135 
+0.338375 0.250212 
+0.345667 0.147506 
+0.309146 0.198383 
+0.282595 0.295251 
+0.262683 0.15159 
+0.296848 0.163558 
+0.264113 0.274616 
+0.338641 0.211817 
+0.259174 0.264645 
+0.330357 0.20687 
+0.353817 0.22874 
+0.269664 0.226656 
+0.252154 0.148463 
+0.366193 0.150144 
+0.256898 0.245194 
+0.304303 0.183618 
+0.335466 0.151312 
+0.262861 0.200441 
+0.262813 0.252586 
+0.313346 0.194787 
+0.289579 0.247262 
+0.286535 0.23699 
+0.310318 0.142124 
+0.341106 0.206294 
+0.273167 0.156972 
+0.269453 0.187743 
+0.355513 0.183233 
+0.263025 0.199449 
+0.313509 0.331514 
+0.311078 0.252023 
+0.281887 0.26323 
+0.255329 0.173521 
+0.300729 0.214255 
+0.286228 0.136099 
+0.299626 0.157784 
+0.271569 0.22316 
+0.300825 0.303776 
+0.27322 0.25126 
+0.176006 0.402724 
+0.226378 0.348555 
+0.168946 0.262155 
+0.139945 0.341302 
+0.141302 0.305834 
+0.15167 0.264065 
+0.13236 0.287971 
+0.259065 0.450122 
+0.167671 0.301213 
+0.232472 0.315405 
+0.318855 0.278831 
+0.149421 0.336895 
+0.167089 0.266261 
+0.125286 0.322987 
+0.186744 0.359308 
+0.181219 0.298146 
+0.162008 0.412922 
+0.142068 0.288868 
+0.20133 0.317385 
+0.152729 0.340693 
+0.156914 0.393993 
+0.151577 0.271511 
+0.137218 0.435257 
+0.135001 0.288495 
+0.233009 0.308706 
+0.253521 0.278079 
+0.126533 0.327627 
+0.129093 0.344601 
+0.271354 0.292011 
+0.228235 0.290139 
+0.213721 0.357127 
+0.152746 0.388868 
+0.137812 0.376055 
+0.247148 0.391889 
+0.199338 0.316814 
+0.19992 0.434137 
+0.265019 0.338816 
+0.138767 0.355017 
+0.139752 0.313471 
+0.217796 0.265376 
+0.152899 0.257636 
+0.248653 0.313653 
+0.154939 0.31371 
+0.235854 0.259526 
+0.165171 0.300912 
+0.246794 0.338431 
+0.203588 0.363351 
+0.155485 0.377965 
+0.211843 0.290398 
+0.306505 0.385808 
+0.261773 0.398547 
+0.194004 0.282203 
+0.176261 0.26052 
+0.188294 0.343489 
+0.234243 0.430868 
+0.181933 0.355282 
+0.170154 0.350051 
+0.161818 0.263494 
+0.302773 0.265246 
+0.168825 0.310823 
+0.164394 0.423268 
+0.29166 0.35488 
+0.271975 0.386961 
+0.296484 0.309649 
+0.196042 0.314222 
+0.145605 0.298324 
+0.255544 0.452838 
+0.189474 0.312347 
+0.176208 0.272894 
+0.16492 0.380216 
+0.187287 0.414524 
+0.178578 0.294622 
+0.278798 0.27663 
+0.132288 0.296908 
+0.254925 0.33015 
+0.350185 0.258513 
+0.16647 0.387784 
+0.155536 0.261762 
+0.31289 0.421124 
+0.1639 0.278125 
+0.299235 0.435447 
+0.126134 0.307695 
+0.163839 0.313053 
+0.143585 0.53421 
+0.162566 0.331135 
+0.220753 0.256421 
+0.219454 0.4336 
+0.19769 0.37137 
+0.131795 0.403685 
+0.180282 0.261803 
+0.196382 0.262449 
+0.20367 0.318381 
+0.130772 0.333474 
+0.180841 0.299823 
+0.214484 0.290828 
+0.138715 0.341963 
+0.251411 0.39227 
+0.125156 0.30578 
+0.266808 0.337032 
+0.240964 0.331971 
+0.175375 0.294612 
+0.179172 0.366302 
+0.147287 0.296443 
+0.164014 0.261311 
+0.273203 0.254742 
+0.136849 0.28521 
+0.213123 0.34695 
+0.173496 0.325799 
+0.292193 0.255454 
+0.138616 0.33484 
+0.25335 0.300546 
+0.158688 0.311034 
+0.145169 0.361547 
+0.128574 0.270011 
+0.15352 0.26367 
+0.159877 0.378762 
+0.140396 0.433171 
+0.133033 0.290889 
+0.163508 0.271152 
+0.210289 0.291615 
+0.14189 0.280736 
+0.149909 0.292447 
+0.180142 0.266672 
+0.144982 0.277738 
+0.159478 0.274755 
+0.164206 0.442762 
+0.178133 0.262889 
+0.166155 0.348706 
+0.290175 0.379262 
+0.154984 0.394628 
+0.250925 0.259417 
+0.141829 0.286385 
+0.173571 0.32318 
+0.155138 0.334199 
+0.19025 0.284642 
+0.132157 0.273714 
+0.169887 0.327512 
+0.231932 0.328859 
+0.163281 0.304052 
+0.145319 0.36004 
+0.144163 0.303037 
+0.158192 0.259722 
+0.198438 0.331068 
+0.127219 0.323939 
+0.155833 0.30954 
+0.190242 0.28389 
+0.199135 0.277733 
+0.321694 0.453193 
+0.141441 0.268926 
+0.281311 0.338708 
+0.189104 0.267739 
+0.133845 0.310823 
+0.209767 0.418156 
+0.297319 0.297564 
+0.161189 0.259427 
+0.213576 0.457596 
+0.270751 0.290435 
+0.201792 0.389826 
+0.135373 0.254834 
+0.133443 0.307913 
+0.146304 0.263914 
+0.254784 0.28866 
+0.205916 0.275338 
+0.196961 0.277155 
+0.239999 0.304274 
+0.172131 0.28929 
+0.145521 0.255641 
+0.25942 0.282277 
+0.167205 0.260999 
+0.169453 0.345352 
+0.255941 0.301047 
+0.264722 0.378455 
+0.133553 0.308037 
+0.137054 0.309238 
+0.20074 0.274192 
+0.250793 0.336116 
+0.162476 0.296901 
+0.137098 0.250421 
+0.193241 0.277141 
+0.185979 0.273677 
+0.17511 0.379876 
+0.149684 0.265748 
+0.225099 0.317336 
+0.132403 0.250674 
+0.13283 0.294247 
+0.158449 0.338396 
+0.252054 0.266546 
+0.154258 0.287316 
+0.223787 0.363484 
+0.160883 0.270353 
+0.152975 0.283687 
+0.237612 0.267854 
+0.18717 0.29144 
+0.174165 0.34938 
+0.165426 0.355092 
+0.287473 0.27884 
+0.128887 0.361068 
+0.179211 0.299544 
+0.215031 0.155091 
+0.142583 0.193322 
+0.276808 0.171428 
+0.1541 0.183927 
+0.194681 0.127557 
+0.128295 0.150629 
+0.235294 0.134568 
+0.201284 0.162832 
+0.314834 0.212242 
+0.142952 0.303737 
+0.195 0.152865 
+0.287761 0.163026 
+0.156109 0.155853 
+0.20319 0.275679 
+0.154476 0.216572 
+0.141193 0.151162 
+0.178573 0.150035 
+0.289051 0.328297 
+0.174799 0.175858 
+0.166596 0.15483 
+0.248603 0.15139 
+0.189713 0.18169 
+0.256645 0.128374 
+0.137268 0.213468 
+0.152469 0.125282 
+0.178565 0.209226 
+0.170197 0.194244 
+0.205242 0.14935 
+0.197247 0.173981 
+0.222782 0.185638 
+0.255122 0.138357 
+0.137221 0.181269 
+0.162759 0.136556 
+0.126264 0.173721 
+0.250943 0.187721 
+0.153073 0.14711 
+0.219836 0.248307 
+0.190877 0.288343 
+0.210659 0.223544 
+0.162835 0.133229 
+0.349274 0.263972 
+0.191313 0.167455 
+0.14183 0.183345 
+0.171238 0.243158 
+0.236826 0.155454 
+0.192282 0.141581 
+0.155562 0.137083 
+0.168371 0.216514 
+0.207958 0.286036 
+0.12849 0.227428 
+0.140926 0.162835 
+0.159604 0.134924 
+0.316663 0.133871 
+0.150814 0.125524 
+0.133106 0.196074 
+0.149622 0.144502 
+0.15218 0.196792 
+0.1625 0.220862 
+0.265263 0.279839 
+0.192861 0.147449 
+0.275367 0.136736 
+0.125005 0.217594 
+0.157708 0.130746 
+0.137828 0.2188 
+0.205963 0.284573 
+0.314706 0.265755 
+0.223344 0.184088 
+0.23382 0.250284 
+0.213944 0.131451 
+0.298037 0.21233 
+0.161291 0.127927 
+0.141618 0.222095 
+0.139736 0.286058 
+0.220316 0.175143 
+0.231751 0.134027 
+0.169706 0.258055 
+0.260142 0.143679 
+0.217887 0.240266 
+0.131439 0.174702 
+0.214523 0.15396 
+0.1768 0.227648 
+0.224522 0.230168 
+0.16444 0.13183 
+0.187441 0.266578 
+0.150256 0.169155 
+0.224854 0.163051 
+0.258784 0.161583 
+0.210855 0.186293 
+0.125887 0.30692 
+0.131581 0.155051 
+0.146777 0.145637 
+0.351453 0.17521 
+0.212764 0.187016 
+0.191882 0.18859 
+0.188817 0.224918 
+0.150217 0.141471 
+0.296471 0.317944 
+0.227911 0.172533 
+0.254149 0.182712 
+0.182149 0.212013 
+0.178375 0.191318 
+0.148463 0.174887 
+0.216658 0.261188 
+0.127068 0.215572 
+0.181236 0.211954 
+0.171546 0.137991 
+0.228025 0.188707 
+0.140536 0.254658 
+0.249636 0.130417 
+0.182512 0.229795 
+0.212421 0.228258 
+0.165297 0.137963 
+0.298921 0.147804 
+0.235353 0.280178 
+0.289942 0.15012 
+0.146599 0.23232 
+0.142329 0.161157 
+0.270889 0.212884 
+0.163608 0.134177 
+0.156306 0.230362 
+0.187909 0.144162 
+0.253675 0.15226 
+0.277975 0.241759 
+0.132958 0.140637 
+0.132167 0.281881 
+0.14486 0.179008 
+0.14129 0.173667 
+0.13792 0.196095 
+0.144878 0.181669 
+0.270531 0.275731 
+0.220914 0.170835 
+0.166041 0.191592 
+0.169549 0.195291 
+0.291418 0.174062 
+0.132584 0.211085 
+0.180231 0.238647 
+0.145224 0.208966 
+0.372008 0.140979 
+0.129183 0.246742 
+0.214086 0.129494 
+0.157116 0.270378 
+0.141035 0.149166 
+0.162474 0.246944 
+0.13349 0.206489 
+0.132278 0.182272 
+0.216111 0.187533 
+0.220175 0.300967 
+0.167512 0.145929 
+0.168184 0.321369 
+0.133267 0.234592 
+0.229341 0.131162 
+0.257111 0.218668 
+0.333441 0.333555 
+0.133784 0.186297 
+0.287145 0.18554 
+0.139222 0.142987 
+0.150074 0.223927 
+0.214144 0.1276 
+0.25632 0.226794 
+0.156132 0.185504 
+0.159565 0.135037 
+0.196861 0.22244 
+0.211956 0.176154 
+0.148823 0.188648 
+0.203664 0.150215 
+0.218453 0.216136 
+0.262688 0.1386 
+0.186142 0.217442 
+0.148249 0.21515 
+0.199327 0.125026 
+0.182995 0.187074 
+0.196654 0.246484 
+0.224754 0.176581 
+0.130524 0.173274 
+0.177737 0.137875 
+0.187153 0.126132 
+0.178623 0.232132 
+0.187313 0.153289 
+0.155405 0.143394 
+0.218375 0.326502 
+0.137907 0.187893 
+0.149386 0.260504 
+0.193591 0.134313 
+0.239484 0.221013 
+0.175538 0.146035 
+0.197115 0.160234 
+0.175092 0.211225 
+0.137077 0.129546 
+0.172193 0.304747 
+0.167678 0.208687 
+0.267804 0.163603 
+0.154224 0.12527 
+0.163461 0.150108 
+0.148395 0.22809 
+0.24221 0.142793 
+0.210785 0.228961 
+0.160274 0.131187 
+0.250532 0.191618 
+0.184075 0.192361 
+0.211521 0.193562 
+
+600 < 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 >
+600 < 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 >

Too many files changed in this diff; some files are not shown