Skip to content

Commit

Permalink
Disable find_unused_parameters in DDP for speedup
Browse files Browse the repository at this point in the history
  • Loading branch information
mooey5775 committed Oct 18, 2021
1 parent 0903005 commit 76ad49e
Showing 1 changed file with 2 additions and 0 deletions.
2 changes: 2 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import LearningRateMonitor, ModelCheckpoint
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.plugins import DDPPlugin

from deperceiver.models.backbone import build_backbone
from deperceiver.models.transformer import build_transformer
Expand Down Expand Up @@ -152,6 +153,7 @@ def main(args):
trainer = Trainer(
gpus=args.gpus,
accelerator='ddp',
plugins=[DDPPlugin(find_unused_parameters=False)],
precision=precision,
default_root_dir=args.output_dir,
gradient_clip_val=args.clip_max_norm,
Expand Down

0 comments on commit 76ad49e

Please sign in to comment.