import logging

import chainer
import chainer.functions as F
import chainer.links as L

from chainer_addons.models.base import BaseClassifier
class SeparateModelClassifier(BaseClassifier):
    """Classifier that holds two separate models."""

    def __init__(self, *args, **kwargs):
        super(SeparateModelClassifier, self).__init__(*args, **kwargs)

        with self.init_scope():
            self.init_separate_model()

    def init_separate_model(self):
        assert hasattr(self, "model"), \
            "This classifier has no \"model\" attribute!"

        if hasattr(self, "separate_model"):
            logging.warning("Separate model already initialized! Skipping initialization.")
            return

        # create an independent copy of the main model with its own parameters
        self.separate_model = self.model.copy(mode="copy")
    def loader(self, model_loader):
        def inner(n_classes, feat_size):
            # load the main model with the given feature size
            model_loader(n_classes=n_classes, feat_size=feat_size)

            # re-initialize the separate model's classifier with the given
            # feature size first ...
            self.separate_model.reinitialize_clf(
                n_classes=n_classes,
                feat_size=feat_size)

            # ... then copy the parameters of the main model ...
            self.separate_model.copyparams(self.model)

            # ... and finally re-initialize the classifier with the default feature size
            self.separate_model.reinitialize_clf(
                n_classes=n_classes,
                feat_size=self.feat_size)

        return inner
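

# --- Usage sketch (illustrative only, not part of this module) ---
# A minimal sketch of how the two-model setup might be wired up, assuming a
# backbone that exposes `reinitialize_clf`, `copyparams` and a weight-loading
# callable; `PretrainedModel` and `load_pretrained` are hypothetical names,
# not confirmed chainer_addons API.
#
#   clf = SeparateModelClassifier(model=PretrainedModel())
#   load = clf.loader(clf.model.load_pretrained)
#   load(n_classes=200, feat_size=2048)
#
# `loader` returns a closure that first loads the main model, then copies its
# parameters into `separate_model`, and finally re-initializes that model's
# classifier with the default feature size.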