瀏覽代碼

fixed NICE::isNaN call

unknown 12 年之前
父節點
當前提交
6b542a3d5e

+ 1 - 1
cbaselib/LabeledSet.cpp

@@ -329,7 +329,7 @@ void LabeledSetVector::restoreRAW ( istream & is )
       return;
 
     for ( int k = 0 ; k < dimension ; k++ )
-      if ( isnan ( data[k] ) ) {
+	  if ( NICE::isNaN ( data[k] ) ) {
         cerr << "WARNING: nan's found !!" << endl;
         data[k] = 0.0;
       }

+ 1 - 1
classifier/fpclassifier/boosting/FPCFullSearch.cpp

@@ -130,7 +130,7 @@ void FPCFullSearch::train ( FeaturePool & fp, Examples & examples )
 	    fprintf (stderr, "w %f w0 %f w1 %f cc %f\n", w, w0, w1, current_value );
 	    #endif
 
-	    assert (! isnan(current_value));
+	    assert (! NICE::isNaN(current_value));
 
 	    if ( (i != values.begin()) // do not check at the begin
 		 #ifdef CHECK_AT_CLASS_BOUNDARIES_ONLY

+ 1 - 1
classifier/fpclassifier/randomforest/DTBStandard.cpp

@@ -178,7 +178,7 @@ DecisionNode *DTBStandard::buildnode_allfeatures ( const FeaturePool & features,
 	{
 	    int current_class = k->second;
 	    double current_value = k->first;
-	    assert (! isnan(current_value));
+	    assert (! NICE::isNaN(current_value));
 	    
 	    double m = weight - weight_old;
 	    l = weight_old;

+ 2 - 2
classifier/kernelclassifier/LaplaceApproximation.cpp

@@ -119,7 +119,7 @@ void LaplaceApproximation::approximate ( KernelData *kernelData, const Vector &
 		for ( uint i = 0 ; i < mode.size(); i++ )
 			loglikelihood += likelihoodFunction->logLike ( y[i], mode[i] );
 
-		if ( isnan(loglikelihood) )
+		if ( NICE::isNaN(loglikelihood) )
 			fthrow(Exception, "Log-Likelihood p(y|f) could not be calculated numerically...check your parameters!");
 
 		if ( isinf(loglikelihood) == 1 )
@@ -139,7 +139,7 @@ void LaplaceApproximation::approximate ( KernelData *kernelData, const Vector &
 
 		// compute objective
 		objective = - 0.5 * mode.scalarProduct(a);
-		if ( isnan(objective) )
+		if ( NICE::isNaN(objective) )
 			fthrow(Exception, "Objective function of Laplace-Approximation could not be calculated numerically...check your parameters!");
 
 		objective += loglikelihood;

+ 2 - 2
classifier/vclassifier/VCNearestNeighbour.cpp

@@ -61,7 +61,7 @@ ClassificationResult VCNearestNeighbour::classify ( const NICE::Vector & x ) con
 
 	double distance = distancefunc->calculate ( x, y );
 
-	if ( isnan(distance) )
+	if ( NICE::isNaN(distance) )
 	{
 	    fprintf (stderr, "VCNearestNeighbour::classify: NAN value found !!\n");
 	    cerr << x << endl;
@@ -122,7 +122,7 @@ void VCNearestNeighbour::teach ( int classno, const NICE::Vector & x )
 {
     fprintf (stderr, "teach!\n");
     for ( size_t i = 0 ; i < x.size() ; i++ )
-	if ( isnan(x[i]) ) 
+	if ( NICE::isNaN(x[i]) ) 
 	{
 	    fprintf (stderr, "There is a NAN value in within this vector: x[%d] = %f\n", (int)i, x[i]);
 	    cerr << x << endl;

+ 2 - 2
features/localfeatures/LFSiftPP.cpp

@@ -129,7 +129,7 @@ int LFSiftPP::extractFeatures ( const NICE::Image & img, VVector & features,
 				sift.computeKeypointDescriptor ( descr_pt, *iter, angles[i] ); 		
 				for ( int j = 0 ; j < descr_size ; j++ )
 				{
-					if ( isnan(descr_pt[j]) ) {
+					if ( NICE::isNaN(descr_pt[j]) ) {
 					fprintf (stderr, "Descriptor with NAN values !!\n");
 					exit(-1);
 					} else {
@@ -160,7 +160,7 @@ int LFSiftPP::extractFeatures ( const NICE::Image & img, VVector & features,
 					sift.computeKeypointDescriptor ( descr_pt, *iter, angle ); 		
 					for ( int j = 0 ; j < descr_size ; j++ )
 					{
-						if ( isnan(descr_pt[j]) ) {
+						if ( NICE::isNaN(descr_pt[j]) ) {
 							fprintf (stderr, "Descriptor with NAN values !!\n");
 							exit(-1);
 						} else {

+ 2 - 2
math/cluster/KMeansHeuristic.cpp

@@ -62,7 +62,7 @@ int KMeansHeuristic::robust_prototypes(const VVector &features, VVector &prototy
 		for (VVector::iterator i = prototypes.begin(); i != prototypes.end(); i++, m++)
 		{
 			NICE::Vector & p = *i;
-			if (isnan(p[0]))
+			if (NICE::isNaN(p[0]))
 			{
 				continue;
 			}
@@ -120,7 +120,7 @@ int KMeansHeuristic::robust_prototypes(const VVector &features, VVector &prototy
 			{
 				if (clusterassign[a] == 1)
 				{
-					if (isnan(features[a][0]))
+					if (NICE::isNaN(features[a][0]))
 						continue;
 
 					p += features[a];

+ 6 - 6
math/topics/PLSA.cpp

@@ -61,12 +61,12 @@ double PLSA::computeLikelihood ( const double *counts,
 	for ( int j = 0 ; j < n ; j++ ) // words
 	{
 	    double pdw = 0.0;
-	    assert ( ! isnan(counts[i*n+j]) );
-	    assert ( ! isnan(pd[i]) );
+	    assert ( ! NICE::isNaN(counts[i*n+j]) );
+	    assert ( ! NICE::isNaN(pd[i]) );
 	    for ( int k = 0 ; k < m ; k++ )
 	    {
-		assert ( ! isnan(pz_d[k*d + i]) );
-		assert ( ! isnan(pw_z[k*n + j]) );
+		assert ( ! NICE::isNaN(pz_d[k*d + i]) );
+		assert ( ! NICE::isNaN(pw_z[k*n + j]) );
 		pdw += pz_d[k*d+i] * pw_z[k*n+j];
 	    }
 	    
@@ -90,8 +90,8 @@ double PLSA::computePerplexity ( const double *counts,
 	    double pdw = 0.0;
 	    for ( int k = 0 ; k < m ; k++ )
 	    {
-		assert ( ! isnan(pz_d[k*d + i]) );
-		assert ( ! isnan(pw_z[k*n + j]) );
+		assert ( ! NICE::isNaN(pz_d[k*d + i]) );
+		assert ( ! NICE::isNaN(pw_z[k*n + j]) );
 		pdw += pz_d[k*d+i] * pw_z[k*n+j];
 	    }
 	    

+ 1 - 1
regression/gpregression/GPRegressionOptimizationProblem.cpp

@@ -106,7 +106,7 @@ double GPRegressionOptimizationProblem::computeObjective()
 	
 	loglikelihood += y.size() * 0.5 * this->kernelData->getLogDetKernelMatrix();
 
-	if ( isnan(loglikelihood) )
+	if ( NICE::isNaN(loglikelihood) )
 	{
 		if ( verbose )
 			cerr << "GPRegressionOptimizationProblem: loglikelihood is undefined (logdet=" << this->kernelData->getLogDetKernelMatrix() << ")" << endl;