
updated logging

Dimitri Korsch, 4 years ago
commit 650e3d9851
2 files changed, 13 additions and 11 deletions
  1. cvfinetune/finetuner/mixins/dataset.py (+0 −1)
  2. cvfinetune/finetuner/mixins/model.py (+13 −10)

+ 0 - 1
cvfinetune/finetuner/mixins/dataset.py

@@ -4,7 +4,6 @@ import logging
 from cvdatasets import AnnotationType
 from cvdatasets.dataset.image import Size
 from cvdatasets.utils import new_iterator
-from functools import partial
 
 
 class _DatasetMixin(abc.ABC):

+ 13 - 10
cvfinetune/finetuner/mixins/model.py

@@ -6,7 +6,6 @@ from chainer import functions as F
 from chainer.optimizer_hooks import Lasso
 from chainer.optimizer_hooks import WeightDecay
 from chainer_addons.functions import smoothed_cross_entropy
-from chainer_addons.models import ModelType
 from chainer_addons.models import PrepareType
 from chainer_addons.training import optimizer
 from chainer_addons.training import optimizer_hooks
@@ -16,7 +15,6 @@ from cvdatasets.utils import pretty_print_dict
 from cvmodelz.models import ModelFactory
 from functools import partial
 from pathlib import Path
-from typing import Callable
 from typing import Tuple
 
 def check_param_for_decay(param):
@@ -66,10 +64,10 @@ class _ModelMixin(abc.ABC):
 			keep_ratio=getattr(opts, "center_crop_on_val", False),
 		)
 
-		logging.info(" ".join([
-			f"Created {self.model.__class__.__name__} model",
-			f"with \"{opts.prepare_type}\" prepare function."
-		]))
+		logging.info(
+			f"Created {self.model.__class__.__name__} model "
+			f" with \"{opts.prepare_type}\" prepare function."
+		)
 
 
 	def init_classifier(self, opts):
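
This hunk replaces a `" ".join(...)` over message parts with implicit concatenation of adjacent f-string literals. A minimal standalone sketch of both styles (the model name and prepare type are hypothetical stand-ins for `self.model` and `opts`), which also shows the doubled space the new version renders between `model ` and ` with`:

```python
import logging

logging.basicConfig(level=logging.INFO)

model_name = "ResNet50"   # hypothetical stand-in for self.model.__class__.__name__
prepare_type = "model"    # hypothetical stand-in for opts.prepare_type

# Old style: " ".join puts exactly one space between the parts.
logging.info(" ".join([
    f"Created {model_name} model",
    f"with \"{prepare_type}\" prepare function.",
]))
# INFO:root:Created ResNet50 model with "model" prepare function.

# New style: adjacent f-string literals are concatenated at parse time.
# The trailing space after "model" plus the leading space before "with"
# leaves two spaces in the rendered message.
logging.info(
    f"Created {model_name} model "
    f" with \"{prepare_type}\" prepare function."
)
# INFO:root:Created ResNet50 model  with "model" prepare function.
```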
@@ -81,10 +79,10 @@ class _ModelMixin(abc.ABC):
 			loss_func=self._loss_func(opts),
 			**kwargs)
 
-		logging.info(" ".join([
-			f"Wrapped the model around {clf_class.__name__}",
-			f"with kwargs: {pretty_print_dict(kwargs)}",
-		]))
+		logging.info(
+			f"Wrapped the model around {clf_class.__name__}"
+			f" with kwargs: {pretty_print_dict(kwargs)}"
+		)
 
 	def _loss_func(self, opts):
 		if getattr(opts, "l1_loss", False):
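
Unlike the previous hunk, this rewrite carries the separating space only at the start of the continuation line (`f" with kwargs: ..."`), so the rendered message keeps single spacing throughout; the trailing comma of the old `join` list is also gone, since implicit concatenation needs none.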
@@ -114,6 +112,11 @@ class _ModelMixin(abc.ABC):
 			decay=0, gradient_clipping=False, **opt_kwargs
 		)
 
+		logging.info(
+			f"Initialized {self.opt.__class__.__name__} optimizer"
+			f" with initial LR {opts.learning_rate} and kwargs: {pretty_print_dict(opt_kwargs)}"
+		)
+
 		if opts.decay > 0:
 			reg_kwargs = {}
 			if opts.l1_loss:
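
The last hunk adds a log line for the freshly configured optimizer, mirroring the model and classifier messages above. All of these are emitted at INFO level, so they only show up once the root logger's threshold is lowered; a minimal stdlib-only sketch (the format string and the sample message are assumptions, not this repository's actual logging setup):

```python
import logging

# The root logger defaults to WARNING, which would swallow the new
# INFO-level messages; lower the threshold to make them visible.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
)

# Shape of the new optimizer message; class name, LR, and kwargs are
# hypothetical examples.
logging.info(
    "Initialized MomentumSGD optimizer"
    " with initial LR 0.001 and kwargs: ..."
)
```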