# tune_models.py
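"""Tune CK GEMMs for one or more ONNX models with the MIGraphX driver.

Example invocation (model path is illustrative):
    python tune_models.py --models bert_base.onnx --batch_sizes 1 16 32
"""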
import argparse
import json
import os
import subprocess
import time

import tune_ck as tc

def parse_args():
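    """Parse and return the command-line arguments."""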
    parser = argparse.ArgumentParser(
        description="Tune CK GEMMs for one or more ONNX models")
    parser.add_argument('--models',
                        '-m',
                        nargs='+',
                        help='ONNX models to be tuned',
                        required=True)
    parser.add_argument('--batch_sizes',
                        '-b',
                        nargs='+',
                        help='Batch sizes to tune',
                        required=True)
    parser.add_argument('--sequence_length',
                        '-s',
                        type=int,
                        default=384,
                        help='Sequence length for transformer models')
    parser.add_argument('-n',
                        type=int,
                        default=18,
                        help='Number of instances to tune')
    parser.add_argument(
        '--update',
        '-u',
        type=str,
        help='Existing tuning JSON. Configs already present will not be re-tuned.')
    return parser.parse_args()


def tune_models(models, batch_sizes, seq_len, n, existing):
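    """Collect CK GEMM configs for each model and batch size, then tune them.

    If an existing tuning JSON is supplied, configs already present in it are
    skipped and the old and new results are merged into the output JSON.
    """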
    time_stamp = time.strftime("%Y_%m_%d_%H_%M")
    log_file = "ck_tuning_{}.log".format(time_stamp)
    json_file = "ck_tuning_{}.json".format(time_stamp)
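    # Run each model at each batch size through the driver with CK GEMM logging
    # enabled, keeping the unique ck_gemm configs in the log file.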
    for model in models:
        for batch in batch_sizes:
            subprocess.run(
                'MIGRAPHX_LOG_CK_GEMM=1 ../build/bin/driver run {} -g --fill1 input_ids --input-dim @input_ids {} {} | grep \'ck_gemm.*: \[{{\' | sort -u >> {}'
                .format(model, batch, seq_len, log_file),
                capture_output=True,
                check=True,
                shell=True)

    # If an existing tuning JSON was given, drop configs that are already in it
    # so only new configs are passed to the tuner.
    if existing is not None:
        with open(existing) as f:
            configs = json.load(f)
        configs = [str(s).replace(" ", "") for entry in configs for s in entry]

        with open(log_file, "r") as lf:
            logs = lf.readlines()
        stripped_logs = [
            line.replace("ck_gemm: ", "").replace("ck_gemm_softmax_gemm: ", "")
                .replace("\"", "'").replace("\n", "") for line in logs
        ]
        update_logs = [
            log for log, stripped in zip(logs, stripped_logs)
            if stripped not in configs
        ]

        # Rewrite the log with only the configs that still need tuning.
        with open(log_file, "w") as lf:
            lf.writelines(update_logs)

    # Tune the collected configs; results are written to json_file.
    tc.tune(log_file, n, json_file)

    # Merge the new results with the existing tuning JSON, if one was supplied.
    if existing is not None:
        with open(existing, "r") as f_old:
            old = json.load(f_old)
        with open(json_file, "r") as f_new:
            new = json.load(f_new)
        with open(json_file, "w") as f_out:
            json.dump(old + new, f_out)

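    # Point MIGraphX at the new tuning results via the MIGRAPHX_CK_TUNING
    # environment variable (visible to this process and any children it spawns).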
    tuning_path = os.path.abspath(json_file)
    os.environ["MIGRAPHX_CK_TUNING"] = tuning_path
    print("\nTuning results have been saved to:\n{}\n".format(json_file))


def run(args):
    tune_models(args.models, args.batch_sizes, args.sequence_length, args.n, args.update)


if __name__ == "__main__":
    run(parse_args())