import os
import sys

# Append the project root (the parent of this config's directory) to sys.path
# so that the local `dataset` and `modeling` packages can be imported.
dir_path = os.path.abspath(os.path.dirname(__file__))
dir_path = os.path.dirname(dir_path)
sys.path.append(dir_path)

from omegaconf import OmegaConf  # noqa

from dataset.dataset import CoupletsDataset  # noqa
from modeling.model import Seq2Seq  # noqa

from libai.config import get_config  # noqa
from libai.config import LazyCall  # noqa
from libai.data.build import build_nlp_train_loader, build_nlp_test_loader  # noqa

# Start from LiBai's default optimizer, nn.Graph, and training configurations.
optim = get_config("common/optim.py").optim
graph = get_config("common/models/graph.py").graph
train = get_config("common/train.py").train

# Dataloaders are declared lazily; LiBai instantiates them when training starts.
dataloader = OmegaConf.create()
dataloader.train = LazyCall(build_nlp_train_loader)(
    dataset=[
        LazyCall(CoupletsDataset)(
            path="data_test/couplets",
            is_train=True,
            maxlen=64,
        )
    ],
    num_workers=4,
)
dataloader.test = [
    LazyCall(build_nlp_test_loader)(
        dataset=LazyCall(CoupletsDataset)(
            path="data_test/couplets",
            is_train=False,
            maxlen=64,
        ),
        num_workers=4,
    )
]
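# Both splits read from data_test/couplets; maxlen=64 caps sequence length to
# match max_position_embeddings in the model config below.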

# Transformer hyperparameters shared by the encoder and decoder of the Seq2Seq model.
transformer_cfg = dict(
    vocab_size=9027,
    max_position_embeddings=64,
    hidden_size=512,
    intermediate_size=512,
    hidden_layers=6,
    num_attention_heads=8,
    embedding_dropout_prob=0.1,
    hidden_dropout_prob=0.1,
    attention_dropout_prob=0.1,
    initializer_range=0.02,
    layernorm_epsilon=1e-5,
    bias_gelu_fusion=False,
    bias_dropout_fusion=False,
    scale_mask_softmax_fusion=False,
    apply_query_key_layer_scaling=True,
)
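# LazyCall defers construction; the Seq2Seq model is built from cfg at runtime.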
model = LazyCall(Seq2Seq)(cfg=transformer_cfg)

# Override LiBai's default training settings for the couplet task.
train.update(
    dict(
        rdma_enabled=False,
        recompute_grad=dict(enabled=False),
        amp=dict(enabled=False),
        output_dir="output/couplet/",
        train_micro_batch_size=128,
        test_micro_batch_size=32,
        train_epoch=20,
        train_iter=0,
        log_period=10,
        warmup_ratio=0.01,
        dist=dict(
            data_parallel_size=1,
            tensor_parallel_size=1,
            pipeline_parallel_size=1,
            pipeline_stage_id=None,
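            # The encoder and decoder each contribute hidden_layers layers,
            # hence hidden_layers * 2 total layers for pipeline partitioning.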
            pipeline_num_layers=model.cfg.hidden_layers * 2,
        ),
        evaluation=dict(
            enabled=False,
        ),
    )
)
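
# A minimal usage sketch, assuming LiBai's standard launcher (the config path
# below is illustrative and depends on where this file lives in the project):
#   bash tools/train.sh tools/train_net.py projects/Couplets/configs/config.py 1
# LazyConfig fields can also be overridden on the command line, e.g.:
#   bash tools/train.sh tools/train_net.py projects/Couplets/configs/config.py 1 train.train_epoch=5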