updated requirements and added some missing functions

Dimitri Korsch, 1 year ago
commit 45ae9e955a

+ 1 - 1
cvfinetune/_version.py

@@ -1 +1 @@
-__version__ = "0.9.0"
+__version__ = "0.11.0"

+ 2 - 4
cvfinetune/finetuner/mixins/classifier.py

@@ -2,8 +2,8 @@ import abc
 import logging
 
 from chainer import functions as F
-from chainer_addons.functions import smoothed_cross_entropy
 from cvdatasets.utils import pretty_print_dict
+from cvfinetune.training.loss import smoothed_cross_entropy
 from functools import partial
 
 from cvfinetune.finetuner.mixins.base import BaseMixin
@@ -61,8 +61,6 @@ class _ClassifierMixin(BaseMixin):
         if self._label_smoothing > 0:
             assert self._label_smoothing < 1, "Label smoothing factor must be less than 1!"
 
-            return partial(smoothed_cross_entropy,
-                           N=self.n_classes,
-                           eps=self._label_smoothing)
+            return smoothed_cross_entropy(self.n_classes, eps=self._label_smoothing)
 
         return F.softmax_cross_entropy
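
Note on this hunk: the loss factory previously returned a functools.partial over a free function from chainer_addons; it now returns a callable instance of the in-repo SmoothedCrossEntropy class (added below). A minimal sketch of the resulting selection logic, assuming a standalone script with the new package installed (the variable names here are illustrative, not taken from the diff):

    from chainer import functions as F
    from cvfinetune.training.loss import smoothed_cross_entropy

    label_smoothing, n_classes = 0.1, 200  # illustrative values

    if label_smoothing > 0:
        # returns a SmoothedCrossEntropy instance, callable as loss_fn(pred, gt)
        loss_fn = smoothed_cross_entropy(n_classes, eps=label_smoothing)
    else:
        # plain cross-entropy has the same call signature
        loss_fn = F.softmax_cross_entropy

    # either branch yields loss_fn(pred, gt), so downstream code such as
    # chainer.links.Classifier(predictor, lossfun=loss_fn) is unaffected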

+ 1 - 1
cvfinetune/finetuner/mixins/model.py

@@ -146,7 +146,7 @@ class _ModelMixin(BaseMixin):
 
         else:
             weights = self._default_weights
-            logging.info(f"Loading custom fine-tuned weights from \"{weights}\"")
+            logging.info(f"Loading default fine-tuned weights from \"{weights}\"")
             return True, weights
 
     @property

+ 9 - 8
cvfinetune/finetuner/mixins/trainer.py

@@ -1,16 +1,12 @@
-import abc
-import gc
 import logging
 import pyaml
 import typing as T
 
 from bdb import BdbQuit
 from chainer.serializers import save_npz
-from chainer.training import extension
 from chainer.training import extensions
 from chainer.training import updaters
 from cvdatasets.utils import pretty_print_dict
-from functools import partial
 from pathlib import Path
 
 
@@ -56,12 +52,15 @@ class _TrainerMixin(BaseMixin):
         return self.no_sacred
 
     def init_experiment(self, *, config: dict):
-        """ creates a sacred experiment that is later used by the trainer's sacred extension """
+        """ creates a sacred experiment that is later used
+            by the trainer's sacred extension
+        """
 
         self.config = config
 
         if self.no_sacred:
-            logging.warning("Default sacred workflow is disabled by the --no_sacred option!")
+            logging.warning("Default sacred workflow is disabled "\
+                "by the --no_sacred option!")
             return
 
         self.ex = Experiment(
@@ -143,7 +142,8 @@ class _TrainerMixin(BaseMixin):
 
         self.save_meta_info()
 
-        logging.info("Snapshotting is {}abled".format("dis" if self.no_snapshot else "en"))
+        logging.info("Snapshotting is {}abled".format(
+            "dis" if self.no_snapshot else "en"))
 
         try:
             self.run_experiment(self.init_eval or self.only_eval)
@@ -163,7 +163,8 @@ class _TrainerMixin(BaseMixin):
         eval_fpath = Path(eval_fpath)
 
         if eval_fpath.exists() and not force:
-            logging.warning(f"Evaluation file (\"{eval_fpath}\") exists already, skipping evaluation")
+            logging.warning(f"Evaluation file (\"{eval_fpath}\") "\
+                "exists already, skipping evaluation")
             return
 
         records = self.evaluator()

+ 2 - 0
cvfinetune/training/loss/__init__.py

@@ -0,0 +1,2 @@
+from cvfinetune.training.loss.smoothed_cross_entropy import SmoothedCrossEntropy
+from cvfinetune.training.loss.smoothed_cross_entropy import smoothed_cross_entropy

+ 21 - 0
cvfinetune/training/loss/smoothed_cross_entropy.py

@@ -0,0 +1,21 @@
+from chainer import functions as F
+
+
+class SmoothedCrossEntropy:
+
+	def __init__(self, n_classes: int, eps: float = 0.1):
+		super(SmoothedCrossEntropy, self).__init__()
+		self.n_classes = n_classes
+		self.eps = eps
+
+	def __call__(self, pred, gt, **kwargs):
+		loss = F.softmax_cross_entropy(pred, gt, **kwargs)
+
+		# E[ sum_k log(P(k)) / N ]: the negative of the cross-entropy
+		# against the uniform distribution U(k) = 1/N, hence the minus sign below
+		reg_loss = F.mean(F.sum(F.log_softmax(pred) / self.n_classes, axis=1))
+
+		return (1-self.eps) * loss - self.eps * reg_loss
+
+
+def smoothed_cross_entropy(N: int, eps: float = 0.1):
+	return SmoothedCrossEntropy(n_classes=N, eps=eps)
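
The returned loss matches the usual label-smoothing formulation (1 - eps) * CE(pred, gt) + eps * H(U, pred), with U the uniform distribution over the N classes; the subtraction in __call__ works because reg_loss carries the opposite sign of that uniform cross-entropy. A minimal sanity check of the new module, assuming Chainer 7.x and NumPy (dummy data, illustrative only):

    import numpy as np
    from chainer import functions as F
    from cvfinetune.training.loss import smoothed_cross_entropy

    n_classes, eps = 10, 0.1
    loss_fn = smoothed_cross_entropy(n_classes, eps=eps)

    pred = np.random.randn(4, n_classes).astype(np.float32)        # logits
    gt = np.random.randint(0, n_classes, size=4).astype(np.int32)  # labels

    loss = loss_fn(pred, gt)

    # closed form: (1 - eps) * CE(pred, gt) + eps * CE(pred, uniform)
    ce = F.softmax_cross_entropy(pred, gt)
    uniform_ce = -F.mean(F.sum(F.log_softmax(pred), axis=1)) / n_classes
    expected = (1 - eps) * ce + eps * uniform_ce
    assert np.allclose(loss.array, expected.array)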

+ 5 - 3
requirements.txt

@@ -7,12 +7,14 @@ PyYAML~=5.1
 simplejson~=3.14
 sacred~=0.7
 
-chainer>=4.2.0,<8.0
+chainer~=7.8
 chainercv~=0.13
-chainercv2~=0.0
-# cupy-cuda101>=4.2.0,<7.0
+chainercv2
 
 # my own packages
 cvargparse~=0.3
 cvdatasets~=0.9
 chainer_addons~=0.9
+
+wandb
+pymongo
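
On the tightened Chainer pin: "~=7.8" is a PEP 440 compatible-release specifier, equivalent to ">=7.8, <8.0", replacing the much broader ">=4.2.0,<8.0" range. A small illustration of that equivalence, assuming the packaging library is available (this snippet is not part of the commit):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("~=7.8")
    assert "7.8.1" in spec      # compatible release: >= 7.8 ...
    assert "8.0.0" not in spec  # ... but < 8.0
    assert "4.2.0" not in spec  # versions the old range allowed are now excluded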