Skip to content

Commit

Permalink
Fixed issue where the wrong (already-instantiated) config was logged instead of the resolved recipe config
Browse files Browse the repository at this point in the history
  • Loading branch information
BloodAxe committed May 10, 2024
1 parent 9e73792 commit 649e4b9
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions src/super_gradients/training/sg_trainer/sg_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,13 +239,15 @@ def train_from_config(cls, cfg: Union[DictConfig, dict]) -> Tuple[nn.Module, Tup
:return: the model and the output of trainer.train(...) (i.e results tuple)
"""

# TODO: bind checkpoint_run_id
setup_device(
device=core_utils.get_param(cfg, "device"),
multi_gpu=core_utils.get_param(cfg, "multi_gpu"),
num_gpus=core_utils.get_param(cfg, "num_gpus"),
)

# Create resolved config before instantiation
recipe_logged_cfg = {"recipe_config": OmegaConf.to_container(cfg, resolve=True)}

# INSTANTIATE ALL OBJECTS IN CFG
cfg = hydra.utils.instantiate(cfg)

Expand Down Expand Up @@ -283,7 +285,6 @@ def train_from_config(cls, cfg: Union[DictConfig, dict]) -> Tuple[nn.Module, Tup

test_loaders = maybe_instantiate_test_loaders(cfg)

recipe_logged_cfg = {"recipe_config": OmegaConf.to_container(cfg, resolve=True)}
# TRAIN
res = trainer.train(
model=model,
Expand Down

0 comments on commit 649e4b9

Please sign in to comment.