import os, subprocess, argparse, time, json, difflib

import tune_ck as tc

def parse_args():
    """Parse the command-line options that control CK GEMM tuning.

    Returns the argparse namespace with: models (list, required),
    batch_sizes (list, required), sequence_length (int, default 384),
    n (int, default 18) and update (str or None).
    """
    parser = argparse.ArgumentParser(
        description="Tune CK GEMMs for one or more ONNX models")
    parser.add_argument(
        '--models', '-m', nargs='+', required=True,
        help='ONNX models to be tuned')
    parser.add_argument(
        '--batch_sizes', '-b', nargs='+', required=True,
        help='Batch sizes to tune')
    parser.add_argument(
        '--sequence_length', '-s', type=int, default=384,
        help='Sequence length for transformer models')
    parser.add_argument(
        '-n', type=int, default=18,
        help='Number of instances to tune')
    parser.add_argument(
        '--update', '-u', type=str,
        help=
        'Existing tuning JSON. Configs already present will not be re-tuned.')
    return parser.parse_args()

def tune_models(models, batch_sizes, seq_len, n, existing):
    """Collect CK GEMM configs for each model/batch-size pair, tune them,
    and write the results to a timestamped JSON file.

    Parameters:
        models (list[str]): paths to ONNX models to run through the driver.
        batch_sizes (list): batch sizes to tune (interpolated into driver args).
        seq_len (int): sequence length used for "bert" transformer models.
        n (int): number of instances to tune (forwarded to tune_ck.tune).
        existing (str or None): path to a prior tuning JSON; configs already
            present there are filtered out of the new log before tuning, and
            the old results are merged into the new JSON afterwards.

    Side effects: appends to a .log file and writes a .json file in the CWD,
    and sets the MIGRAPHX_CK_TUNING environment variable to the JSON's
    absolute path. Raises subprocess.CalledProcessError if the driver fails.
    """
    time_stamp = time.strftime("%Y_%m_%d_%H_%M")
    log_file = "ck_tuning_{}.log".format(time_stamp)
    json_file = "ck_tuning_{}.json".format(time_stamp)

    for model in models:
        for batch in batch_sizes:
            # Default driver inputs (image-style shapes); "bert" models
            # instead get token-id inputs sized by batch and seq_len.
            params = "--input-dim @sample {} 4 64 64 @timestep 1 @encoder_hidden_states {} 64 1024 --fp16 ".format(
                batch, batch)
            if "bert" in model:
                params = "--fill1 input_ids --input-dim @input_ids {} {} ".format(
                    batch, seq_len)
            # NOTE(review): model/params are interpolated into a shell
            # pipeline (shell=True) — only use with trusted paths. The grep
            # keeps unique ck_gemm config lines and appends them to the log.
            subprocess.run(
                'MIGRAPHX_LOG_CK_GEMM=1 ../build/bin/driver run {} -g {} | grep \'ck_gemm.*: \[{{\' | sort -u >> {}'
                .format(model, params, log_file),
                capture_output=True,
                check=True,
                shell=True)

    if existing is not None:
        # Filter out of the log any config already present in the existing
        # tuning JSON so it is not re-tuned.
        with open(existing) as f:
            configs = json.load(f)
        # Whitespace-stripped string forms of every config; a set gives O(1)
        # membership tests instead of scanning a list per log line.
        configs = {str(s).replace(" ", "") for l in configs for s in l}
        with open(log_file, "r") as lf:
            logs = list(lf)
        # Normalize log lines to match the JSON's repr: drop the op prefix,
        # unify quote style and strip the trailing newline.
        stripped_logs = [
            line.replace("ck_gemm: ",
                         "").replace("ck_gemm_softmax_gemm: ",
                                     "").replace("\"",
                                                 "'").replace("\n", "")
            for line in logs
        ]
        update_logs = [
            line for line, stripped in zip(logs, stripped_logs)
            if stripped not in configs
        ]
        # Rewrite the log with only the configs that still need tuning.
        with open(log_file, "w") as lf:
            lf.writelines(update_logs)

    tc.tune(log_file, n, json_file)

    if existing is not None:
        # Merge the previous tuning results with the newly generated ones.
        with open(existing, "r") as f_old:
            old = json.load(f_old)
        with open(json_file, "r") as f_new:
            new = json.load(f_new)
        with open(json_file, "w") as f_out:
            json.dump(old + new, f_out)

    tuning_path = os.path.abspath(json_file)
    os.environ["MIGRAPHX_CK_TUNING"] = tuning_path
    print("\nTuning results have been saved to:\n{}\n".format(json_file))

def run(args):
    """Forward the parsed command-line options to tune_models."""
    models = args.models
    batches = args.batch_sizes
    tune_models(models, batches, args.sequence_length, args.n, args.update)
# Guard the entry point so importing this module (e.g. for reuse of
# tune_models) does not immediately launch a tuning run.
if __name__ == "__main__":
    run(parse_args())