@@ -12,56 +12,19 @@ import activeLearningWlinGPprototype
 
 class Classifier(activeLearningWlinGPprototype.ClassifierPrototype):
 
-    def __init__(self,
-                 sigmaN = 0.00178,
-                 useDiversity = False,
-                 useDensity = False,
-                 useVariance = False,
-                 loNoise = True,
-                 configFile=None):
+
+    def __init__(self, sigmaN = 0.00178, loNoise = True, configFile=None):
 
         activeLearningWlinGPprototype.ClassifierPrototype.__init__(self, sigmaN=sigmaN, configFile=configFile)
-        self.useDiversity = helperFunctions.getConfig(configFile, 'activeLearning', 'useDiversity', useDiversity, 'bool', True)
-        self.useVariance = helperFunctions.getConfig(configFile, 'activeLearning', 'useVariance', useVariance, 'bool', True)
-        self.useDensity = helperFunctions.getConfig(configFile, 'activeLearning', 'useDensity', useDensity, 'bool', True)
         self.loNoise = helperFunctions.getConfig(configFile, 'activeLearning', 'loNoise', loNoise, 'bool', True)
 
 
-    def getDensity(self, sim):
-        return numpy.sum(sim, axis=1) / float(sim.shape[1])
-
-
-    def getDiversity(self, sim):
-        return 1.0 / numpy.max(sim, axis=1)
-
-
     # x.shape = (number of samples, feat dim)
     def calcAlScores(self, x):
 
         loNoise = (self.yUni == -1).any() and self.loNoise
         sortedScores = numpy.sort(self.infer(x, loNoise), axis=1)
 
-        alScores = numpy.absolute(sortedScores[:,-1] - sortedScores[:,-2])*(-1.0)
-        sim = None
-
-        if self.useDensity:
-
-            sim = numpy.dot(x, numpy.append(self.X, x, axis=0).T)
-            density = self.getDensity(sim)
-            alScores = numpy.multiply(alScores, density)
-
-        elif self.useDiversity:
-
-            if sim is None:
-                sim = numpy.dot(x, self.X.T)
-                diversity = self.getDiversity(sim)
-            else:
-                diversity = self.getDiversity(sim[:,:self.X.shape[0]])
-
-            alScores = numpy.multiply(alScores, diversity)
-
-        elif self.useVariance:
-
-            variance = self.calcSigmaF(x)
-            alScores = numpy.multiply(alScores, variance)
+        alScores = numpy.abs(sortedScores[:,-1] - sortedScores[:,-2])
 
-        return alScores
+        # since we actually want to select min(scores) instead of max(scores), we have to turn the scores around
+        return numpy.add(numpy.subtract(alScores, numpy.max(alScores))*(-1.0), numpy.finfo(numpy.float32).eps)
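For intuition, here is a minimal standalone sketch of the new selection rule (not code from this repository; `margins` is a hypothetical stand-in for the |top1 - top2| margins that calcAlScores computes). Each margin m is mapped to max(margins) - m + eps, so "pick the smallest margin" becomes "pick the largest score", and the float32 epsilon keeps every score strictly positive:

    import numpy

    # hypothetical per-sample margins; smaller = more ambiguous sample
    margins = numpy.array([0.9, 0.1, 0.5])

    # flip the ordering so the most ambiguous sample gets the largest score,
    # then add a tiny epsilon so no score is exactly zero
    alScores = numpy.add(numpy.subtract(margins, numpy.max(margins)) * (-1.0),
                         numpy.finfo(numpy.float32).eps)

    print(numpy.argmax(alScores))  # -> 1, the index of the smallest margin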
|