fixed feature size initialization

Dimitri Korsch, 6 years ago
Parent
Commit
848d496941
1 changed file with 3 additions and 3 deletions
  1. finetune/finetuner/base.py  +3 −3

+ 3 - 3
finetune/finetuner/base.py

@@ -141,8 +141,8 @@ class _ModelMixin(abc.ABC):
 				logging.info("Loading pre-trained weights \"{}\"".format(self.weights))
 				loader = partial(self.model.load_for_finetune, weights=self.weights)
 
-
-		feat_size = self.clf.feat_size
+		if hasattr(self.clf, "output_size"):
+			feat_size = self.clf.output_size
 
 		if hasattr(self.clf, "loader"):
 			loader = self.clf.loader(loader)
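
The old code read self.clf.feat_size unconditionally; the patch assigns feat_size only when the classifier exposes an output_size attribute. A minimal sketch of that guarded lookup (the DummyClassifier and its value below are illustrative, not from the repository):

# Hypothetical classifier used only to illustrate the hasattr guard.
class DummyClassifier:
	output_size = 2048  # placeholder value

clf = DummyClassifier()
feat_size = None
if hasattr(clf, "output_size"):
	feat_size = clf.output_size

print(feat_size)  # 2048; stays None for classifiers without the attribute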
@@ -256,7 +256,7 @@ class _TrainerMixin(abc.ABC):
 			**self.updater_kwargs,
 		)
 		logging.info(" ".join([
-			f"Using single GPU: {self.device}."
+			f"Using single GPU: {self.device}.",
 			f"{self.updater_cls.__name__} is initialized",
 			f"with following kwargs: {_format_kwargs(self.updater_kwargs)}"
 			])
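
The second hunk only adds a comma, but it changes the logged message: adjacent string literals in Python are concatenated implicitly, so without the comma " ".join never gets a chance to put a space between the first two fragments. A small, self-contained sketch with placeholder values (not the trainer's real attributes):

# Illustrative values standing in for self.device and self.updater_cls.__name__.
device = 0
updater_name = "StandardUpdater"

without_comma = " ".join([
	f"Using single GPU: {device}."
	f"{updater_name} is initialized",
	"with following kwargs: ...",
])
with_comma = " ".join([
	f"Using single GPU: {device}.",
	f"{updater_name} is initialized",
	"with following kwargs: ...",
])

print(without_comma)  # Using single GPU: 0.StandardUpdater is initialized with following kwargs: ...
print(with_comma)     # Using single GPU: 0. StandardUpdater is initialized with following kwargs: ...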