Skip to content
Snippets Groups Projects
Unverified Commit 4171548a authored by Maxim Bonnaerens's avatar Maxim Bonnaerens Committed by GitHub
Browse files

Allow find_unused_parameters to be set in config (#2225)

* Allow find_unused_parameters to be set in config

* Fix typo
parent 2b967af9
No related branches found
No related tags found
No related merge requests found
@@ -129,10 +129,14 @@ def _dist_train(model,
             seed=cfg.seed) for ds in dataset
     ]
     # put model on gpus
+    find_unused_parameters = cfg.get('find_unused_parameters', False)
+    # Sets the `find_unused_parameters` parameter in
+    # torch.nn.parallel.DistributedDataParallel
     model = MMDistributedDataParallel(
         model.cuda(),
         device_ids=[torch.cuda.current_device()],
-        broadcast_buffers=False)
+        broadcast_buffers=False,
+        find_unused_parameters=find_unused_parameters)
     # build runner
     optimizer = build_optimizer(model, cfg.optimizer)
...
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment