@@ -271,7 +271,7 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
}
}

- fprintf(stderr, "FPCRandomForests: minimum number of examples: %f (classno: %d)\n", minExamples, minExamplesClassNo);
+ fprintf(stderr, "FPCRandomForests: minimum number of examples: %d (classno: %d)\n", (int)minExamples, minExamplesClassNo);

int featuresCount = (int)(fp.size() * features_per_tree);
fprintf(stderr, "FPCRandomForests: number of features %d\n", (int)fp.size());
@@ -288,7 +288,7 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
#pragma omp parallel for
for (int k = 0 ; k < number_of_trees ; k++)
{
- fprintf(stderr, "[ -- building tree %d/%d -- ]\n", k + 1, number_of_trees);
+ fprintf(stderr, "FPCRandomForests: [ -- building tree %d/%d -- ]\n", k + 1, number_of_trees);

FeaturePool fp_subset;
Examples examples_subset;
@@ -357,7 +357,8 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
/******* training of an individual tree ****/
DecisionTree *tree = new DecisionTree(conf, maxClassNo);

- builder->build(*tree, fp_subset, examples_subset, maxClassNo);
+ #pragma omp critical
+ builder->build(*tree, fp_subset, examples_subset, maxClassNo);

/******* prune tree using a simple minimum entropy criterion *****/
if (minimum_entropy != 0.0)
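
For reference, here is a minimal, self-contained sketch of the pattern the last hunk introduces: the per-tree loop stays parallel, while the shared builder call is serialized with an OpenMP critical section. The Tree and Builder types below are hypothetical stand-ins, not the DecisionTree and builder objects used above; the assumption is that the real builder touches shared state and is therefore not safe to call concurrently.

    // Sketch only: simplified stand-ins, not the classes from the diff above.
    #include <cstdio>
    #include <vector>

    struct Tree { int id; };   // hypothetical stand-in for DecisionTree

    struct Builder             // hypothetical stand-in for the tree builder
    {
        // Assumed to modify shared state, hence serialized below.
        void build(Tree &t, int k) { t.id = k; }
    };

    int main()
    {
        const int number_of_trees = 8;
        std::vector<Tree> forest(number_of_trees);
        Builder builder;

        #pragma omp parallel for
        for (int k = 0; k < number_of_trees; k++)
        {
            fprintf(stderr, "[ -- building tree %d/%d -- ]\n", k + 1, number_of_trees);

            // Only the shared builder call is serialized; the rest of the
            // loop body still runs concurrently across threads.
            #pragma omp critical
            builder.build(forest[k], k);
        }

        for (int k = 0; k < number_of_trees; k++)
            printf("tree %d built\n", forest[k].id);
        return 0;
    }

Compiled with OpenMP enabled (e.g. g++ -fopenmp), the critical section keeps the build calls from overlapping; without OpenMP the pragmas are ignored and the sketch simply runs single-threaded.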