# @package _global_
# GPT-3 2.7B (HF GPT-2-style config), head-dimension-128 variant, trained
# on the Pile. Builds on the gpt3xl-hf recipe and overrides the model size.
defaults:
  - /experiment/pile/gpt3xl-hf.yaml

model:
  config:
    # GPT-3 2.7B sizing: hidden dim 2560, 32 layers.
    n_embd: 2560
    # Head dim = n_embd / n_head = 2560 / 20 = 128, matching the "hdim128"
    # variant this file names. (Was 128, which gives head dim 20 and
    # contradicts the filename.)
    n_head: 20
    n_layer: 32

# OOM on A100 80GB even with batch_size = 1
datamodule:
  batch_size: 1

train:
  optimizer:
    # Peak learning rate for GPT-3 2.7B (per the GPT-3 training table).
    lr: 1.6e-4