fix: Fix shared memory address issue in LRSchedulerWrapper.
RoyYang0714 committed Dec 15, 2023
1 parent ab4fa89 commit 2cdc7c2
Showing 3 changed files with 7 additions and 4 deletions.
4 changes: 2 additions & 2 deletions vis4d/config/default/pl_trainer.py
@@ -1,7 +1,7 @@
"""Default runtime configuration for PyTorch Lightning."""
import inspect

import pytorch_lightning as pl
from lightning import Trainer

from vis4d.config import FieldConfigDict
from vis4d.config.typing import ExperimentConfig
@@ -12,7 +12,7 @@ def get_default_pl_trainer_cfg(config: ExperimentConfig) -> ExperimentConfig:
     pl_trainer = FieldConfigDict()
 
     # PL Trainer arguments
-    for k, v in inspect.signature(pl.Trainer).parameters.items():
+    for k, v in inspect.signature(Trainer).parameters.items():
         if not k in {"callbacks", "devices", "logger", "strategy"}:
             pl_trainer[k] = v.default
 
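For context, the changed loop mirrors the Lightning `Trainer` constructor so that the trainer config exposes every constructor argument with its default value; the commit only swaps the legacy `pytorch_lightning` import for the `lightning` 2.x package. A minimal sketch of the same pattern, using a plain dict as a stand-in for vis4d's `FieldConfigDict` (an assumption made for brevity):

    # Collect the Trainer's keyword arguments and their defaults via introspection.
    import inspect

    from lightning import Trainer  # Lightning 2.x import path used by the commit

    trainer_defaults = {}
    for name, param in inspect.signature(Trainer).parameters.items():
        # Skip arguments that vis4d configures elsewhere (see the diff above).
        if name not in {"callbacks", "devices", "logger", "strategy"}:
            trainer_defaults[name] = param.default

    print(trainer_defaults.get("max_epochs"))  # prints the Trainer's built-in default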
1 change: 1 addition & 0 deletions vis4d/engine/callbacks/logging.py
@@ -51,6 +51,7 @@ def on_train_epoch_start(
"""Hook to run at the start of a training epoch."""
if self.epoch_based:
self.train_timer.reset()
self.last_step = 0
self._metrics.clear()
elif trainer_state["global_step"] == 0:
self.train_timer.reset()
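The logging-callback change resets the step counter whenever logging is epoch-based, so a new epoch's per-interval statistics are not computed against a step index left over from the previous epoch. A minimal sketch of the failure mode, with a hypothetical EpochLogger standing in for vis4d's callback (the refresh-rate logic here is an assumption):

    class EpochLogger:
        """Hypothetical epoch-based logger illustrating why last_step must be reset."""

        def __init__(self, refresh_rate: int = 50) -> None:
            self.refresh_rate = refresh_rate
            self.last_step = 0

        def on_train_epoch_start(self) -> None:
            self.last_step = 0  # without this, the counter carries over between epochs

        def on_train_batch_end(self, cur_iter: int) -> None:
            if cur_iter - self.last_step >= self.refresh_rate:
                print(f"{cur_iter - self.last_step} steps since the last log line")
                self.last_step = cur_iter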
6 changes: 4 additions & 2 deletions vis4d/engine/optim/scheduler.py
@@ -7,7 +7,7 @@
 from torch.optim.lr_scheduler import LRScheduler
 
 from vis4d.common.typing import DictStrAny
-from vis4d.config import instantiate_classes
+from vis4d.config import copy_and_resolve_references, instantiate_classes
 from vis4d.config.typing import LrSchedulerConfig
 
 
@@ -30,7 +30,9 @@ def __init__(
         steps_per_epoch: int = -1,
     ) -> None:
         """Initialize LRSchedulerWrapper."""
-        self.lr_schedulers_cfg = lr_schedulers_cfg
+        self.lr_schedulers_cfg: list[
+            LrSchedulerConfig
+        ] = copy_and_resolve_references(lr_schedulers_cfg)
         self.lr_schedulers: dict[int, LRSchedulerDict] = {}
         super().__init__(optimizer)
         self.steps_per_epoch = steps_per_epoch
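The scheduler change addresses the shared-memory-address issue named in the commit title: LRSchedulerWrapper previously stored a reference to the caller's scheduler config, so two wrappers built from the same config list (or repeated instantiation in the same run) would mutate one shared object. Copying the config via copy_and_resolve_references gives each wrapper its own resolved copy. A minimal illustration of the difference, using plain dicts and copy.deepcopy as a stand-in for vis4d's config copy (an assumption for brevity):

    import copy

    class SharingWrapper:
        """Keeps a reference to the caller's config; in-place edits leak out."""

        def __init__(self, cfg: list[dict]) -> None:
            self.cfg = cfg
            self.cfg[0]["end"] *= 2  # stands in for any in-place mutation

    class CopyingWrapper:
        """Takes a private copy first, mirroring the fix."""

        def __init__(self, cfg: list[dict]) -> None:
            self.cfg = copy.deepcopy(cfg)
            self.cfg[0]["end"] *= 2

    shared_cfg = [{"begin": 0, "end": 500}]
    SharingWrapper(shared_cfg)
    SharingWrapper(shared_cfg)
    print(shared_cfg[0]["end"])  # 2000: both wrappers mutated the same object

    shared_cfg = [{"begin": 0, "end": 500}]
    CopyingWrapper(shared_cfg)
    CopyingWrapper(shared_cfg)
    print(shared_cfg[0]["end"])  # 500: the original config stays untouched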
