# Colossal-AI training configuration for SimCLR self-supervised
# pretraining on CIFAR. Top-level names in this module are read by the
# Colossal-AI launcher as the run configuration.
from colossalai.amp import AMP_TYPE

# Experiment identifier — presumably used for log/checkpoint naming;
# verify against the training script that consumes this config.
LOG_NAME = 'cifar-simclr'

BATCH_SIZE = 512
NUM_EPOCHS = 801

# Linear LR scaling rule: base learning rate 0.03 at a reference batch
# size of 256, scaled proportionally to the actual BATCH_SIZE.
LEARNING_RATE = 0.03 * BATCH_SIZE / 256
WEIGHT_DECAY = 0.0005
MOMENTUM = 0.9


# Mixed-precision training via PyTorch native AMP.
fp16 = dict(
    mode=AMP_TYPE.TORCH,
)

# Root directory where the dataset is stored / downloaded.
dataset = dict(
    root='./dataset',
)

# Accumulate gradients over 2 micro-steps before an optimizer update
# (effective batch size is presumably BATCH_SIZE * 2 — confirm with
# the Colossal-AI engine semantics).
gradient_accumulation = 2

# Clip the gradient norm to 1.0 for training stability.
clip_grad_norm = 1.0