deepspeed.json
{
    "train_micro_batch_size_per_gpu": 1,
    "zero_allow_untested_optimizer": true,
    "fp16": {
        "enabled": true,
        "loss_scale": 0,
        "initial_scale_power": 11,
        "loss_scale_window": 1000,
        "hysteresis": 2,
        "min_loss_scale": 1
    },
    "zero_optimization": {
        "stage": 3,
        "allgather_partitions": true,
        "allgather_bucket_size": 5e8,
        "overlap_comm": false,
        "reduce_scatter": true,
        "reduce_bucket_size": 5e8,
        "contiguous_gradients": true
    },
    "gradient_accumulation_steps": "auto",
    "gradient_clipping": "auto",
    "steps_per_print": 2000,
    "train_batch_size": "auto",
    "wall_clock_breakdown": false
}
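
The "auto" entries in this config are resolved by the Hugging Face Transformers Trainer integration, so the file is usually referenced from TrainingArguments rather than passed directly to deepspeed.initialize. Below is a minimal, hypothetical sketch of that usage under those assumptions; the model name, toy dataset, and output directory are placeholders and are not part of this repository.

# sketch.py -- illustrative only; assumes transformers and deepspeed are installed
from torch.utils.data import Dataset
from transformers import AutoModelForCausalLM, AutoTokenizer, Trainer, TrainingArguments

class ToyTextDataset(Dataset):
    """Tiny stand-in dataset so the sketch is self-contained (placeholder data)."""
    def __init__(self, tokenizer, texts):
        self.examples = [
            tokenizer(t, truncation=True, max_length=32,
                      padding="max_length", return_tensors="pt")
            for t in texts
        ]
    def __len__(self):
        return len(self.examples)
    def __getitem__(self, idx):
        enc = self.examples[idx]
        input_ids = enc["input_ids"].squeeze(0)
        return {"input_ids": input_ids,
                "attention_mask": enc["attention_mask"].squeeze(0),
                "labels": input_ids.clone()}

model_name = "gpt2"  # placeholder checkpoint, not the project's actual model
tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.pad_token = tokenizer.eos_token  # GPT-2 has no pad token by default
model = AutoModelForCausalLM.from_pretrained(model_name)

args = TrainingArguments(
    output_dir="out",                 # placeholder output directory
    per_device_train_batch_size=1,    # corresponds to train_micro_batch_size_per_gpu
    fp16=True,                        # corresponds to the fp16 block in deepspeed.json
    deepspeed="deepspeed.json",       # hands the ZeRO stage-3 config to the Trainer
    max_steps=10,
)

trainer = Trainer(model=model, args=args,
                  train_dataset=ToyTextDataset(tokenizer, ["hello world"] * 8))
trainer.train()  # run via the deepspeed launcher, e.g. `deepspeed sketch.py`, for ZeRO-3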