classifier.py

import logging

from chainer_addons.models.classifier import Classifier as C


class Classifier(C):

    def __init__(self, *args, **kwargs):
        super(Classifier, self).__init__(*args, **kwargs)
        assert hasattr(self, "model"), \
            "This classifier has no \"model\" attribute!"

    @property
    def feat_size(self):
        # prefer the pooling layer's output dimension when it is set ...
        if hasattr(self.model.pool, "output_dim") and self.model.pool.output_dim is not None:
            return self.model.pool.output_dim
        # ... otherwise fall back to the model's default feature size
        return self.model.meta.feature_size
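
# Hypothetical usage sketch of the feat_size fallback above (constructor
# arguments and values are illustrative only; just pool.output_dim and
# meta.feature_size come from the wrapped chainer_addons model):
#
#   clf = Classifier(...)
#   clf.model.pool.output_dim = 512
#   clf.feat_size   # -> 512 (the pooling output dimension wins when set)
#   clf.model.pool.output_dim = None
#   clf.feat_size   # -> clf.model.meta.feature_size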

class SeparateModelClassifier(Classifier):
    """Classifier that holds two separate models"""

    def __init__(self, *args, **kwargs):
        super(SeparateModelClassifier, self).__init__(*args, **kwargs)

        with self.init_scope():
            self.init_separate_model()

    def init_separate_model(self):
        if hasattr(self, "separate_model"):
            logging.warning("separate_model is already initialized! Skipping re-initialization!")
            return

        # deep-copy the main model, including its current parameters
        self.separate_model = self.model.copy(mode="copy")
    def loader(self, model_loader):

        def inner(n_classes, feat_size):
            # load the main model with the given feature size
            model_loader(n_classes=n_classes, feat_size=feat_size)

            # re-init the separate model's classifier with the given feature size first ...
            self.separate_model.reinitialize_clf(
                n_classes=n_classes,
                feat_size=feat_size)

            # ... then copy the freshly loaded parameters from the main model ...
            self.separate_model.copyparams(self.model)

            # ... and finally re-init the classifier with the default feature size
            self.separate_model.reinitialize_clf(
                n_classes=n_classes,
                feat_size=self.feat_size)

        return inner
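
# A minimal, self-contained sketch of the three loader steps above, using a
# plain chainer Chain instead of a chainer_addons model. DummyModel and its
# reinitialize_clf are hypothetical stand-ins, not part of this module:
if __name__ == "__main__":
    import chainer
    import chainer.links as L

    class DummyModel(chainer.Chain):

        def __init__(self, feat_size, n_classes):
            super(DummyModel, self).__init__()
            with self.init_scope():
                self.fc = L.Linear(64, feat_size)
                self.clf = L.Linear(feat_size, n_classes)

        def reinitialize_clf(self, n_classes, feat_size):
            # replace only the classification head; the feature extractor stays
            with self.init_scope():
                self.clf = L.Linear(feat_size, n_classes)

    model = DummyModel(feat_size=256, n_classes=10)
    separate_model = model.copy(mode="copy")

    # 1. match the feature size the main model's weights were loaded with ...
    separate_model.reinitialize_clf(n_classes=10, feat_size=256)
    # 2. ... copy the parameters over from the main model ...
    separate_model.copyparams(model)
    # 3. ... then re-init the classifier head with the default feature size
    separate_model.reinitialize_clf(n_classes=10, feat_size=512)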