---
# Functional-test definition (T5 model family); the `script` below runs inside
# /workspace/megatron-lm, so this is presumably consumed by the Megatron-LM
# test harness — confirm against the harness schema for `format_version: 1`.
type: basic
format_version: 1
maintainers: [mcore]
loggers: [stdout]
spec:
  # `{test_case}`, `{model}` and `{assets_dir}` are template placeholders
  # filled in by the harness for each product entry below; `{{`/`}}` render
  # as literal braces in the generated shell script.
  name: "{test_case}"
  model: t5
  build: mcore-pyt
  nodes: 1
  gpus: 8
  platforms: dgx_a100
  artifacts:
    # Dataset mounted at the container path referenced by DATA_PATH below.
    /workspace/data/t5_data: text/the_pile/t5_shard00
  script: |-
    ls
    cd /workspace/megatron-lm

    ARGUMENTS=(
        "DATA_PATH=/workspace/data/t5_data"
        "DATA_CACHE_PATH=/workspace/data/cache"
        "OUTPUT_PATH={assets_dir}"
        "TENSORBOARD_PATH={assets_dir}/tensorboard"
        "CHECKPOINT_PATH=/workspace/checkpoints"
        "TRAINING_SCRIPT_PATH=pretrain_t5.py"
        "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}"
    )

    # Quote the array expansion so each KEY=VALUE element stays a single word
    # even if a rendered path (e.g. {assets_dir}) contains spaces
    # (shellcheck SC2068).
    bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh "${{ARGUMENTS[@]}}"

# Test matrix: each entry expands the spec above once per `test_case` value,
# with the given scope (CI trigger: merge-request vs. weekly) and time_limit
# (presumably seconds — 20 min for MR, 2.5 h for weekly; confirm with harness).
products:
  - scope: [mr]
    time_limit: [1200]
    test_case:
    - t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G
    - t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G
  - scope: [weekly]
    time_limit: [9000]
    test_case:
    - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch
    - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1
    - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel
    - t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1
    - t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch
    - t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1