# optimizer_300e.yml — optimizer and learning-rate schedule for a 300-epoch run.
# Total number of training epochs.
epoch: 300

# Learning-rate schedule: YOLOv5-style decay over the full run, plus a short
# exponential warmup at the start.
LearningRate:
  base_lr: 0.01
  schedulers:
  # Custom framework tag; decays the LR across max_epochs — presumably down to
  # base_lr * min_lr_ratio (verify against the scheduler's implementation).
  - !YOLOv5LRDecay
    max_epochs: 300
    min_lr_ratio: 0.01
  # Exponential warmup over the first 3 epochs.
  - !ExpWarmup
    epochs: 3

# SGD with Nesterov momentum and L2 weight-decay regularization.
OptimizerBuilder:
  optimizer:
    type: Momentum
    momentum: 0.937
    use_nesterov: true  # lowercase boolean (canonical YAML; was `True`)
  regularizer:
    factor: 0.0005
    type: L2