Sfoglia il codice sorgente

added classifier tests

Dimitri Korsch 4 anni fa
parent
commit
f0e2a42af5

+ 8 - 0
cvmodelz/classifiers/__init__.py

@@ -0,0 +1,8 @@
+from cvmodelz.classifiers.base import Classifier
+from cvmodelz.classifiers.separate_model_classifier import SeparateModelClassifier
+
+
+__all__ = [
+	"Classifier",
+	"SeparateModelClassifier",
+]

+ 6 - 2
cvmodelz/classifiers/base.py

@@ -14,8 +14,8 @@ class Classifier(chainer.Chain):
 		loss_func: Callable = F.softmax_cross_entropy,
 		only_head: bool = False,
 		):
-		super(BaseClassifier, self).__init__()
-		self.layer_name = layer_name or model.meta.clf_layer_name
+		super(Classifier, self).__init__()
+		self.layer_name = layer_name or model.clf_layer_name
 		self.loss_func = loss_func
 
 		with self.init_scope():
@@ -34,6 +34,10 @@ class Classifier(chainer.Chain):
 		self.model.disable_update()
 		self.model.clf_layer.enable_update()
 
+	@property
+	def n_classes(self) -> int:
+		return self.model.clf_layer.W.shape[0]
+
 	def loader(self, model_loader: Callable) -> Callable:
 		return model_loader
 

+ 2 - 2
cvmodelz/models/base.py

@@ -31,8 +31,8 @@ class BaseModel(abc.ABC, chainer.Chain):
 		self.meta = ModelInfo()
 
 	@abc.abstractmethod
-	def __call__(self, X, layer_name=None):
-		pass
+	def forward(self, *args, **kwargs):
+		return super(BaseModel, self).forward(*args, **kwargs)
 
 	@abc.abstractproperty
 	def functions(self) -> OrderedDict:

+ 4 - 4
cvmodelz/models/pretrained/base.py

@@ -15,22 +15,22 @@ class PretrainedModelMixin(BaseModel):
 				...
 	"""
 
-	def __init__(self, n_classes: int = 1000, *args, **kwargs):
+	def __init__(self, *args, n_classes: int = 1000, pretrained_model: str = None, **kwargs):
 		from cvmodelz.models import ModelFactory
 
 		if ModelFactory.is_chainer_model(self):
-			kwargs["pretrained_model"] = kwargs.get("pretrained_model", None)
+			kwargs["pretrained_model"] = pretrained_model
 
 		super(PretrainedModelMixin, self).__init__(*args, **kwargs)
 
 		with self.init_scope():
 			self.init_extra_layers(n_classes)
 
-	def __call__(self, X, layer_name=None):
+	def forward(self, X, layer_name=None):
 		assert hasattr(self, "meta"), "Did you forget to initialize the meta attribute?"
 
 		layer_name = layer_name or self.meta.classifier_layers[-1]
-		caller = super(PretrainedModelMixin, self).__call__
+		caller = super(PretrainedModelMixin, self).forward
 		activations = caller(X, layers=[layer_name])
 
 		if isinstance(activations, dict):

+ 1 - 1
cvmodelz/models/pretrained/inception/inception_v3.py

@@ -55,7 +55,7 @@ class InceptionV3(PretrainedModelMixin, chainer.Chain):
 			classifier_layers=["fc"],
 		)
 
-	def __call__(self, x, layer_name='fc'):
+	def forward(self, x, layer_name='fc'):
 		aux_logit = None
 		for key, funcs in self.functions.items():
 			for func in funcs:

+ 7 - 0
cvmodelz/models/pretrained/resnet.py

@@ -63,6 +63,13 @@ class ResNet35(BaseResNet, chainer.Chain):
 		]
 		return OrderedDict(links)
 
+	def forward(self, x, layer_name=None):
+		for key, funcs in self.functions.items():
+			for func in funcs:
+				x = func(x)
+			if key == layer_name:
+				return x
+
 class ResNet50(BaseResNet, L.ResNet50Layers):
 	n_layers = 50
 

+ 1 - 1
cvmodelz/models/wrapper.py

@@ -63,7 +63,7 @@ class ModelWrapper(BaseModel):
 
 		raise RuntimeError(f"tried to load weights with paths {paths}, but did not succeed")
 
-	def __call__(self, X, layer_name=None):
+	def forward(self, X, layer_name=None):
 		if layer_name is None:
 			res = self.wrapped(X)
 

+ 37 - 0
tests/classifier_tests.py

@@ -0,0 +1,37 @@
+import numpy as np
+import test_utils
+import unittest
+
+from cvmodelz.classifiers import Classifier
+from cvmodelz.models import ModelFactory
+
+class ClassifierTests(unittest.TestCase):
+
+
+	def new_clf(self, key, **kwargs):
+		model = ModelFactory.new(key, pretrained_model=None)
+		return model, Classifier(model, **kwargs)
+
+	def creation(self, key):
+		model, clf = self.new_clf(key)
+		self.assertIs(clf.model, model)
+
+	def loss_computation(self, key):
+		model, clf = self.new_clf(key)
+
+		in_size = clf.model.meta.input_size
+		X = clf.xp.ones((4, 3, in_size, in_size), dtype=np.float32)
+		y = clf.xp.random.choice(clf.n_classes, size=4)
+
+		loss = clf(X, y)
+		self.assertIsNotNone(loss)
+		self.assertEqual(loss.ndim, 0)
+		self.assertEqual(loss.shape, ())
+
+
+
+test_utils.add_tests(ClassifierTests.creation,
+	model_list=ModelFactory.get_models(["cvmodelz", "chainercv2"]))
+
+test_utils.add_tests(ClassifierTests.loss_computation,
+	model_list=ModelFactory.get_models(["cvmodelz", "chainercv2"]))

+ 1 - 0
tests/main.py

@@ -11,6 +11,7 @@ cwd = Path(__file__).resolve().parent
 sys.path.insert(0, str(cwd.parent))
 
 from model_tests import *
+from classifier_tests import *
 
 with chainer.using_config("train", False):
 	unittest.main()