# multimodal-llava.yaml
# (header reconstructed: original lines were repository-viewer UI residue —
# file-size banner, commit attribution, and gutter line numbers)
# Functional-test definition: the `spec` below is a template expanded once per
# (scope, test_case) combination listed under `products` at the bottom of the file.
type: basic
format_version: 1
maintainers: [mcore]
loggers: [stdout]
spec:
  # {test_case} is a placeholder — presumably substituted per `products` entry
  # by the test launcher (str.format-style); confirm against the runner.
  name: "{test_case}"
  model: multimodal-llava
  build: mcore-pyt
  nodes: 1
  gpus: 8
  platforms: dgx_a100
  # NOTE(review): assumed to be seconds — TODO confirm unit with the launcher.
  time_limit: 1200
  # Placeholder at the template level; the concrete scope is supplied per
  # product entry (see `products` below).
  scope: null
  # Shell driver for one test case. {assets_dir}, {model}, {test_case} are
  # template placeholders — presumably filled by the launcher via
  # str.format-style substitution (doubled braces escape a literal `${...}`
  # for bash); confirm against the runner.
  script: |-
    ls
    # Fail fast if the workspace checkout is missing instead of running the
    # remaining commands from whatever directory we happen to be in.
    cd /workspace/megatron-lm || exit 1

    ARGUMENTS=(
        "DATA_PATH=''"
        "DATA_CACHE_PATH=''"
        "OUTPUT_PATH={assets_dir}"
        "TENSORBOARD_PATH={assets_dir}/tensorboard"
        "CHECKPOINT_PATH=/workspace/checkpoints"
        "TRAINING_SCRIPT_PATH=pretrain_vlm.py"
        "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}"
    )

    # Quote the array expansion so each element is passed as exactly one
    # argument, protected from word splitting and pathname expansion.
    bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh "${{ARGUMENTS[@]}}"

# Matrix expansion: each scope × test_case combination below instantiates the
# `spec` template above as one concrete test.
products:
  - scope: [mr]
    test_case:
    - multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G
    - multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G
    - multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G
    - multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G