llama2-7b.sh
#!/bin/bash
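# Run hf_causal_model.py over the LLaMA-2 7B/13B base and chat models in
# zero-shot and 5-shot settings ({0,5} expands to "0 5"); results are written
# under ../results/<model-name>.
# Note: the meta-llama checkpoints are gated on the Hugging Face Hub, so you
# may need to accept the license and authenticate first
# (e.g. `huggingface-cli login`).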

cd ../src || exit 1  # abort if the source directory is missing

# LLaMA2-7B-Base
for i in {0,5}; do
python -u hf_causal_model.py \
    --data_dir ../data \
    --model_name_or_path meta-llama/Llama-2-7b-hf \
    --save_dir ../results/LLaMA2-7B-Base \
    --num_few_shot $i
done

# LLaMA2-7B-Chat
for i in {0,5}; do
python -u hf_causal_model.py \
    --data_dir ../data \
    --model_name_or_path meta-llama/Llama-2-7b-chat-hf \
    --save_dir ../results/LLaMA2-7B-Chat \
    --num_few_shot $i
done

# LLaMA2-13B-Base
for i in {0,5}; do
python -u hf_causal_model.py \
    --data_dir ../data \
    --model_name_or_path meta-llama/Llama-2-13b-hf \
    --save_dir ../results/LLaMA2-13B-Base \
    --num_few_shot $i
done

# LLaMA2-13B-Chat
for i in {0,5}; do
python -u hf_causal_model.py \
    --data_dir ../data \
    --model_name_or_path meta-llama/Llama-2-13b-chat-hf \
    --save_dir ../results/LLaMA2-13B-Chat \
    --num_few_shot $i
done