Skip to content
Snippets Groups Projects
Commit 58adc05b authored by Guo-Hua Wang's avatar Guo-Hua Wang
Browse files

fix bug

parent 168632c7
No related branches found
No related tags found
No related merge requests found
......@@ -80,7 +80,8 @@ def train_detector(model,
if distiller_cfg is None:
optimizer = build_optimizer(model, cfg.optimizer)
else:
optimizer = build_optimizer(model.module.base_parameters(), cfg.optimizer)
#optimizer = build_optimizer(model.module.base_parameters(), cfg.optimizer)
optimizer = build_optimizer(model.base_parameters(), cfg.optimizer)
# use apex fp16 optimizer
if cfg.optimizer_config.get("type", None) and cfg.optimizer_config["type"] == "DistOptimizerHook":
......
import torch.nn as nn
import torch.nn.functional as F
from mmcv.cnn import xavier_init
from ..builder import NECKS
from .fpn import FPN
@NECKS.register_module()
class CBFPN(FPN):
    """Feature Pyramid Network with weight sharing for CBNet backbones.

    A CBNet backbone can emit several feature pyramids. During training
    every pyramid is pushed through the same (shared-weight) FPN and a
    list of per-pyramid outputs is returned; at inference time only the
    last pyramid is processed.
    """

    def forward(self, inputs):
        # A plain tuple of feature maps means a single pyramid — wrap it
        # so the code below can treat both cases uniformly.
        if not isinstance(inputs[0], (list, tuple)):
            inputs = [inputs]
        # Bind the parent forward once; zero-arg super() is not usable
        # inside a comprehension scope.
        fpn_forward = super().forward
        if not self.training:
            # Inference: only the final (most refined) pyramid is used.
            return fpn_forward(inputs[-1])
        # Training: run every pyramid through the shared FPN.
        return [fpn_forward(x) for x in inputs]
\ No newline at end of file
#!/usr/bin/env bash
# Launch distributed FGD training with torch.distributed.launch.
#
# Usage:
#   ./<this script> CONFIG GPUS [extra args forwarded to fgd_train.py]
# Environment:
#   PORT — master port for the rendezvous (default: 29500).
CONFIG=$1
GPUS=$2
PORT=${PORT:-29500}

# Quote all expansions so configs/paths containing spaces survive word
# splitting; "${@:3}" forwards the remaining args verbatim.
PYTHONPATH="$(dirname "$0")/..":$PYTHONPATH \
python -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
    "$(dirname "$0")/fgd_train.py" "$CONFIG" --launcher pytorch "${@:3}"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment