/**
 * @file GMM.h
 * @brief Gaussian Mixture Model based on
 *        S. Calinon, F. Guenter and A. Billard: "On Learning, Representing and
 *        Generalizing a Task in a Humanoid Robot", IEEE Transactions on Systems,
 *        Man and Cybernetics, Part B (Special issue on robot learning by
 *        observation, demonstration and imitation), vol. 37, no. 2, pp. 286-298, 2007.
 * @author Björn Fröhlich
 * @date 05/14/2009
 */
#ifndef GMMINCLUDE
#define GMMINCLUDE

#include <string>
#include <vector>

#include "core/vector/VectorT.h"
#include "core/vector/MatrixT.h"
#include "vislearning/cbaselib/MultiDataset.h"
#include "vislearning/cbaselib/LocalizationResult.h"
#include "vislearning/cbaselib/CachedExample.h"
#include "vislearning/cbaselib/Example.h"
#include "vislearning/math/cluster/ClusterAlgorithm.h"
#include "core/vector/VVector.h"

namespace OBJREC {

class GMM : public ClusterAlgorithm
{
  protected:
    //! number of Gaussians
    int gaussians;

    //! dimension of each feature
    int dim;

    //! mean vectors
    NICE::VVector mu;

    //! sparse sigma vectors
    NICE::VVector sparse_sigma;

    //! sparse inverse sigma vectors (if usediag)
    NICE::VVector sparse_inv_sigma;

    //! save det_sigma for fast computation
    std::vector<double> log_det_sigma;

    //! the config file
    const NICE::Config *conf;

    //! the weight for each Gaussian
    std::vector<double> priors;

    //! parameters of another GMM to compare against
    NICE::VVector mu2;
    NICE::VVector sparse_sigma2;
    std::vector<double> priors2;
    bool comp;

    //! maximum number of iterations for EM
    int maxiter;

    //! how many features to use per class; use 0 for all input features
    int featsperclass;

    //! for faster computation: cdimval = dim * 2 * PI
    double cdimval;

    //! parameter for the MAP estimation
    double tau;

    //! whether to use pyramid initialisation or not
    bool pyramid;

  public:
    /**
     * simplest constructor
     */
    GMM();

    /**
     * simple constructor
     * @param _no_classes number of Gaussians
     */
    GMM(int _no_classes);

    /**
     * standard constructor
     * @param conf a config file
     * @param _no_classes number of Gaussians
     */
    GMM(const NICE::Config *conf, int _no_classes = -1);

    /**
     * standard destructor
     */
    ~GMM() {};

    /**
     * computes the mixture
     * @param examples the input features
     */
    void computeMixture(Examples examples);

    /**
     * computes the mixture
     * @param DataSet the input features
     */
    void computeMixture(const NICE::VVector &DataSet);

    /**
     * returns the probabilities for each Gaussian in a sparse vector
     * @param vin input vector
     * @param probs BoV output vector
     */
    void getProbs(const NICE::Vector &vin, SparseVector &probs);

    /**
     * returns the probabilities for each Gaussian
     * @param vin input vector
     * @param probs BoV output vector
     */
    void getProbs(const NICE::Vector &vin, NICE::Vector &probs);

    /**
     * returns the Fisher score for the GMM
     * @param vin input vector
     * @param probs Fisher score output vector
     */
    void getFisher(const NICE::Vector &vin, SparseVector &probs);

    /**
     * initializes the Gaussian mixture by selecting randomized mean vectors
     * and using the covariance of all features
     * @param DataSet input matrix
     */
    void initEM(const NICE::VVector &DataSet);

    /**
     * alternative to initEM: initializes the Gaussian mixture with a k-means clustering
     * @param DataSet input matrix
     */
    void initEMkMeans(const NICE::VVector &DataSet);

    /**
     * performs Expectation Maximization on the dataset in order to obtain an
     * nbgaussians-state GMM; DataSet is a matrix of size (no_classes, nDimensions)
     * @param DataSet input matrix
     * @param nbgaussians number of Gaussians to use
     * @return number of iterations
     */
    int doEM(const NICE::VVector &DataSet, int nbgaussians);

    /**
     * computes the log probability of a vector for the given state
     * @param Vin input vector
     * @param state index of the Gaussian (state)
     * @return log probability
     */
    double logpdfState(const NICE::Vector &Vin, int state);

    /**
     * determines the best mixture component for the input feature
     * @param v input feature
     * @param bprob probability of the best mixture component
     * @return number of the best mixture component
     */
    int getBestClass(const NICE::Vector &v, double *bprob = NULL);

    /**
     * clusters a given set of features and returns the labels for each feature
     * @param features input features
     * @param prototypes mean of the best Gaussian
     * @param weights weight of the best Gaussian
     * @param assignment number of the best Gaussian for each feature
     */
    void cluster(const NICE::VVector &features, NICE::VVector &prototypes,
                 std::vector<double> &weights, std::vector<int> &assignment);

    /**
     * saves the GMM data
     * @param filename filename
     */
    void saveData(const std::string filename);

    /**
     * loads the GMM data
     * @param filename filename
     * @return true if everything works fine
     */
    bool loadData(const std::string filename);

    /**
     * returns the parameters of the mixture
     * @param mean mean vectors
     * @param sSigma diagonal covariance matrices
     * @param p weights
     */
    void getParams(NICE::VVector &mean, NICE::VVector &sSigma, std::vector<double> &p);

    /**
     * sets the parameters of another mixture for comparing it with this one
     * @param mean mean vectors
     * @param sSigma diagonal covariance matrices
     * @param p weights
     */
    void setCompareGM(NICE::VVector mean, NICE::VVector sSigma, std::vector<double> p);

    /**
     * probability product kernel
     * @param sigma1 diagonal covariance of the first Gaussian
     * @param sigma2 diagonal covariance of the second Gaussian
     * @param mu1 mean of the first Gaussian
     * @param mu2 mean of the second Gaussian
     * @param p kernel parameter
     * @return kernel value
     */
    double kPPK(NICE::Vector sigma1, NICE::Vector sigma2, NICE::Vector mu1, NICE::Vector mu2, double p);

    /**
     * starts a comparison between this mixture and another one set via setCompareGM()
     */
    double compare();

    /**
     * whether to compare or not
     * @param c
     */
    void comparing(bool c = true);

    //! returns the number of Gaussians
    int getSize() { return gaussians; }
};

} // namespace

#endif
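/*
 * Usage sketch (illustrative, not part of the original interface). The config
 * file name "gmm.conf", the number of Gaussians, and the way the features are
 * obtained are assumptions; additional NICE includes (e.g. for NICE::Config)
 * may be required depending on your checkout.
 *
 *   #include "vislearning/math/cluster/GMM.h"
 *
 *   int main()
 *   {
 *     NICE::Config conf("gmm.conf");             // hypothetical config file
 *     OBJREC::GMM gmm(&conf, 16);                // mixture with 16 Gaussians
 *
 *     NICE::VVector features;                    // one NICE::Vector per sample
 *     // ... fill features with training data ...
 *
 *     gmm.computeMixture(features);              // EM training on the features
 *
 *     NICE::Vector probs;
 *     gmm.getProbs(features[0], probs);          // per-Gaussian probabilities
 *     int best = gmm.getBestClass(features[0]);  // index of the best Gaussian
 *
 *     return best >= 0 ? 0 : 1;
 *   }
 */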