Commit fc1aa176 authored by Alan Turner

Add ability to update a ck tuning json file

parent a92cde80
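In short (see the two diffs below): the tuning helpers now pass the CK operator name ('ck_gemm' or 'ck_gemm_softmax_gemm') explicitly through parse_log, benchmark and benchmark_ck instead of switching on a module-level ck_function flag, and the driver script gains an --update/-u option that drops log entries whose configs already appear in an existing tuning JSON and appends the newly tuned results to that file. A hedged sketch of how the new option might be driven, assuming a placeholder script name tune_models.py (the real filename is not visible on this page); only the -n and --update/-u flags are taken from the diff:

import subprocess

# Hypothetical invocations; "tune_models.py" and the tuning file name are placeholders.
# First pass: tune everything, producing ck_tuning_<timestamp>.json and a matching .log.
subprocess.run(["python", "tune_models.py", "-n", "18"], check=True)

# Second pass: hand the previous tuning JSON to --update so configs already covered
# are filtered out of the new log before tuning, and the fresh results are appended.
subprocess.run(
    ["python", "tune_models.py", "-n", "18",
     "--update", "ck_tuning_2024_01_01_00_00.json"],
    check=True)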
@@ -22,9 +22,6 @@ def pretty_print(obj):
 
 def run_driver(b):
     print(b)
-    #outfile = open("temp2.json", "w")
-    #json.dump(b, outfile)
-    #outfile.close()
     with tmp_file(lambda tf: json.dump(b, tf)) as tf:
         cp = subprocess.run('./bin/gpu-driver {}'.format(tf),
                             capture_output=True,
@@ -48,31 +45,19 @@ def get_device_time(s):
     return convert_to_float(fields[-1].strip())
 
 
-def benchmark_ck(config, tuning):
+def benchmark_ck(config, name, tuning):
     try:
-        b0 = {
+        b = {
             'settings': {
                 'iterations': 100
             },
             'compile_op': {
-                'name': 'ck_gemm',
+                'name': name,
                 'check': True,
                 'tuning_val': tuning,
                 'inputs': config
             }
         }
-        b1 = {
-            'settings': {
-                'iterations': 100
-            },
-            'compile_op': {
-                'name': 'ck_gemm_softmax_gemm',
-                'check': True,
-                'tuning_val': tuning,
-                'inputs': config
-            }
-        }
-        b = b0 if (ck_function == 0) else b1
         for line in run_driver(b):
             dtime = get_device_time(line)
             print(dtime)
@@ -81,8 +66,8 @@ def benchmark_ck(config, tuning):
         return sys.float_info.max
 
 
-def benchmark(config, size):
-    times = [benchmark_ck(config, i) for i in range(size)]
+def benchmark(config, name, size):
+    times = [benchmark_ck(config, name, i) for i in range(size)]
     return times.index(min(times))
@@ -93,21 +78,18 @@ def parse_log(f):
         if line.startswith('ck_gemm:'):
             line = line[len('ck_gemm:'):].strip()
             config = json.loads(line)
-            ck_function = 0
-            yield config
+            yield (config, 'ck_gemm')
         if line.startswith('ck_gemm_softmax_gemm:'):
             line = line[len('ck_gemm_softmax_gemm:'):].strip()
             config = json.loads(line)
             ck_function = 1
-            yield config
+            yield (config, 'ck_gemm_softmax_gemm')
 
 
 def benchmark_log(f, n):
     result = []
-    logs = parse_log(f)
-    for config in logs:
-        additional_tv = ck_function * 2
-        tuned = benchmark(config, n + additional_tv)
+    for config, name in parse_log(f):
+        tuned = benchmark(config, name, n)
         print("Tuned:", tuned)
         result.append([config, tuned])
     return result
...

-import os, subprocess, argparse, time
+import os, subprocess, argparse, time, json, difflib
 import tune_ck as tc
@@ -22,13 +22,17 @@ def parse_args():
                         help='Sequence length for transformer models')
     parser.add_argument('-n',
                         type=int,
-                        default=16,
+                        default=18,
                         help='Number of instances to tune')
+    parser.add_argument('--update',
+                        '-u',
+                        type=str,
+                        help='Existing tuning JSON. Configs already present will not be re-tuned.')
     args = parser.parse_args()
     return args
 
 
-def tune_models(models, batch_sizes, seq_len, n):
+def tune_models(models, batch_sizes, seq_len, n, existing):
     time_stamp = time.strftime("%Y_%m_%d_%H_%M")
     log_file = "ck_tuning_{}.log".format(time_stamp)
     json_file = "ck_tuning_{}.json".format(time_stamp)
@@ -41,14 +45,44 @@ def tune_models(models, batch_sizes, seq_len, n):
                         check=True,
                         shell=True)
 
+    if (existing is not None):
+        f = open(existing)
+        configs = json.load(f)
+        configs = [str(s).replace(" ", "") for l in configs for s in l]
+        update_logs = []
+        with open(log_file, "r") as lf:
+            logs = [line for line in lf]
+            stripped_logs = [line.replace("ck_gemm: ", "").replace("ck_gemm_softmax_gemm: ", "").replace("\"", "'").replace("\n", "") for line in logs]
+            for i in range(len(stripped_logs)):
+                if (stripped_logs[i] not in configs):
+                    update_logs.append(logs[i])
+        with open(log_file, "w") as lf:
+            for line in update_logs:
+                lf.write(line)
+        f.close()
+
     tc.tune(log_file, n, json_file)
 
+    if (existing is not None):
+        f_old = open(existing, "r")
+        f_new = open(json_file, "r")
+        old = json.load(f_old)
+        new = json.load(f_new)
+        new = old + new
+        f_old.close()
+        f_new.close()
+        json.dump(new, open(json_file, "w"))
+
     tuning_path = os.path.abspath(json_file)
     os.environ["MIGRAPHX_CK_TUNING"] = tuning_path
     print("\nTuning results have been saved to:\n{}\n".format(json_file))
 
 
 def run(args):
-    tune_models(args.models, args.batch_sizes, args.sequence_length, args.n)
+    tune_models(args.models, args.batch_sizes, args.sequence_length, args.n, args.update)
 
 
 run(parse_args())
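For readers skimming the second diff: the core of the --update path is a filter-then-merge step. Below is a hedged, standalone sketch of that logic; it mirrors the added code in tune_models, but the function names prune_already_tuned and merge_tuning are invented here, and the tuning JSON is assumed (as benchmark_log above suggests) to be a list of [config, tuning_index] pairs.

import json


def prune_already_tuned(existing_json, log_path):
    # Rewrite log_path in place, keeping only lines whose config is not already
    # covered by existing_json (a list of [config, tuning_index] pairs).
    with open(existing_json) as f:
        tuned = json.load(f)
    known = {str(s).replace(" ", "") for entry in tuned for s in entry}

    with open(log_path) as lf:
        lines = lf.readlines()
    kept = []
    for line in lines:
        # Same normalisation as the added code: drop the op-name prefix, switch to
        # single quotes and strip the newline before comparing against known configs.
        stripped = (line.replace("ck_gemm: ", "")
                        .replace("ck_gemm_softmax_gemm: ", "")
                        .replace("\"", "'")
                        .replace("\n", ""))
        if stripped not in known:
            kept.append(line)
    with open(log_path, "w") as lf:
        lf.writelines(kept)


def merge_tuning(existing_json, new_json):
    # After tc.tune writes new_json, append it to the previous results so the
    # final file covers both the old and the newly tuned configs (old entries first).
    with open(existing_json) as f_old, open(new_json) as f_new:
        merged = json.load(f_old) + json.load(f_new)
    with open(new_json, "w") as f:
        json.dump(merged, f)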