/** 
* @file Bagging.h
* @brief implementation of Breiman's bagging idea
* @author Erik Rodner
* @date 04/24/2008

*/
#ifndef BaggingINCLUDE
#define BaggingINCLUDE

#ifdef NOVISUAL
#include <vislearning/nice_nonvis.h>
#else
#include <vislearning/nice.h>
#endif

#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
#include "vislearning/features/fpfeatures/FeaturePool.h"


namespace OBJREC {

/** implementation of random set forests */
class Bagging : public FeaturePoolClassifier
{
    protected:
	std::vector<FeaturePoolClassifier *> ensemble;	
    
	int number_of_classifiers;
	double features_per_tree;
	double samples_per_tree;
	bool use_simple_balancing;
	bool weight_examples;
	double minimum_entropy;

	bool memory_efficient;

	const NICE::Config *conf;
	// refactor-nice.pl: check this substitution
	// old: string confsection;
	std::string confsection;
	DecisionTreeBuilder *builder;

    public:
  
	/** train */
	Bagging( const NICE::Config *conf, 
			  // refactor-nice.pl: check this substitution
			  // old: string section );
			  std::string section );
      
	/** simple destructor */
	virtual ~Bagging();

	ClassificationResult classify ( Example & pce );
	int classify_optimize ( Example & pce );
	
	void getLeafNodes ( Example & pce,
			    std::vector<DecisionNode *> & leafNodes,
			    int depth = 100000 );
	
	virtual void train ( FeaturePool & fp,
		     Examples & examples );

	void restore (std::istream & is, int format = 0);
	void store (std::ostream & os, int format = 0) const;
	void clear ();
	
	void indexDescendants ( std::map<DecisionNode *, std::pair<long, int> > & index ) const;

	void resetCounters ();

	const std::vector<DecisionTree *> & getForest () const { return forest; };

	FeaturePoolClassifier *clone () const;
};


} // namespace

#endif