Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
MINGtoMING committed Jul 27, 2024
1 parent b6db227 commit 2844dd6
Show file tree
Hide file tree
Showing 2 changed files with 36 additions and 1 deletion.
1 change: 1 addition & 0 deletions configs/rtdetrv2/_base_/rtdetrv2_r50vd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ ema_filter_no_grad: True
hidden_dim: 256
use_focal_loss: True
eval_size: [640, 640]
reset_norm_param_attr: True


DETR:
Expand Down
36 changes: 35 additions & 1 deletion ppdet/engine/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,12 @@ def __init__(self, cfg, mode='train'):
m._epsilon = 1e-3 # for amp(fp16)
m._momentum = 0.97 # 0.03 in pytorch

#normalize params for deploy
# reset norm param attr for setting them in optimizer
if cfg['reset_norm_param_attr']:
self.model = self.reset_norm_param_attr(
self.model, weight_attr=None, bias_attr=None)

# normalize params for deploy
if 'slim' in cfg and cfg['slim_type'] == 'OFA':
self.model.model.load_meanstd(cfg['TestReader'][
'sample_transforms'])
Expand Down Expand Up @@ -1450,3 +1455,32 @@ def setup_metrics_for_loader():
imshow_lanes(img, lanes, out_file=out_file)

return results

def reset_norm_param_attr(self, layer, **kwargs):
    """Recursively rebuild normalization layers with new parameter attrs.

    Every ``nn.BatchNorm2D``, ``nn.LayerNorm`` and ``nn.GroupNorm`` found
    under ``layer`` is re-instantiated with the given ``**kwargs``
    (e.g. ``weight_attr=None, bias_attr=None``) while its hyper-parameters
    and learned weights are preserved, so the affine params can later be
    configured in the optimizer. Returns the (possibly replaced) layer.
    """
    norm_types = (nn.BatchNorm2D, nn.LayerNorm, nn.GroupNorm)
    if not isinstance(layer, norm_types):
        # Not a norm layer: recurse into children and splice any
        # rebuilt sub-layer back into this container in place.
        for child_name, child in layer.named_children():
            rebuilt_child = self.reset_norm_param_attr(child, **kwargs)
            if rebuilt_child is not child:
                setattr(layer, child_name, rebuilt_child)
        return layer

    # Snapshot the trained weights/statistics before rebuilding.
    saved_state = layer.state_dict()
    if isinstance(layer, nn.BatchNorm2D):
        rebuilt = nn.BatchNorm2D(
            num_features=layer._num_features,
            momentum=layer._momentum,
            epsilon=layer._epsilon,
            **kwargs)
    elif isinstance(layer, nn.LayerNorm):
        rebuilt = nn.LayerNorm(
            normalized_shape=layer._normalized_shape,
            epsilon=layer._epsilon,
            **kwargs)
    else:
        rebuilt = nn.GroupNorm(
            num_groups=layer._num_groups,
            num_channels=layer._num_channels,
            epsilon=layer._epsilon,
            **kwargs)
    # Restore the original parameters into the freshly-built layer.
    rebuilt.set_state_dict(saved_state)
    return rebuilt

0 comments on commit 2844dd6

Please sign in to comment.