Commit
•
fa569cd
1
Parent(s):
bd2dda8
Add support for different normalization layers (#7377)
Browse files
* Add support for different normalization layers.
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* Cleanup
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Glenn Jocher <[email protected]>
train.py
CHANGED
@@ -151,10 +151,11 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
|
|
151 |
LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")
|
152 |
|
153 |
g = [], [], [] # optimizer parameter groups
|
|
|
154 |
for v in model.modules():
|
155 |
if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): # bias
|
156 |
g[2].append(v.bias)
|
157 |
-
if isinstance(v, nn.BatchNorm2d):  # weight (no decay)
|
158 |
g[1].append(v.weight)
|
159 |
elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter): # weight (with decay)
|
160 |
g[0].append(v.weight)
|
|
|
151 |
LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")
|
152 |
|
153 |
g = [], [], [] # optimizer parameter groups
|
154 |
+
bn = nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm
|
155 |
for v in model.modules():
|
156 |
if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): # bias
|
157 |
g[2].append(v.bias)
|
158 |
+
if isinstance(v, bn): # weight (no decay)
|
159 |
g[1].append(v.weight)
|
160 |
elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter): # weight (with decay)
|
161 |
g[0].append(v.weight)
|