Commit 7dc6e3ae authored by Khalique Ahmed's avatar Khalique Ahmed
Browse files

Merge branch 'develop' of https://github.com/ROCmSoftwarePlatform/AMDMIGraphX into mi100_opts

parents f94d77fc a275f590
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify topk along axis 1 picking the 4 largest entries of a {3, 5}
// float tensor; only the values output (tuple element 0) is returned.
struct test_topk_0 : verify_program<test_topk_0>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape input_shape{migraphx::shape::float_type, {3, 5}};
        auto input = mod->add_parameter("data", input_shape);
        auto topk  = mod->add_instruction(
            migraphx::make_op("topk", {{"axis", 1}, {"k", 4}, {"largest", 1}}), input);
        // topk yields a tuple (values, indices); extract the values.
        auto values =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), topk);
        mod->add_return({values});
        return prog;
    }
};
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify topk with a negative axis (-2, i.e. axis 0 of a rank-2 shape),
// k = 3, largest mode; both the values and indices outputs are returned.
struct test_topk_1 : verify_program<test_topk_1>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape input_shape{migraphx::shape::float_type, {3, 5}};
        auto input = mod->add_parameter("data", input_shape);
        auto topk  = mod->add_instruction(
            migraphx::make_op("topk", {{"axis", -2}, {"k", 3}, {"largest", 1}}), input);
        // Unpack both elements of the (values, indices) tuple.
        auto values =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), topk);
        auto indices =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 1}}), topk);
        mod->add_return({values, indices});
        return prog;
    }
};
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify topk along axis 1 picking the 4 *smallest* entries
// (largest = 0) of a {3, 5} float tensor; only the values are returned.
struct test_topk_2 : verify_program<test_topk_2>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape input_shape{migraphx::shape::float_type, {3, 5}};
        auto input = mod->add_parameter("data", input_shape);
        auto topk  = mod->add_instruction(
            migraphx::make_op("topk", {{"axis", 1}, {"k", 4}, {"largest", 0}}), input);
        // Keep only tuple element 0 (the values).
        auto values =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), topk);
        mod->add_return({values});
        return prog;
    }
};
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify topk with negative axis (-2), k = 3, smallest mode
// (largest = 0); both values and indices outputs are returned.
struct test_topk_3 : verify_program<test_topk_3>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape input_shape{migraphx::shape::float_type, {3, 5}};
        auto input = mod->add_parameter("data", input_shape);
        auto topk  = mod->add_instruction(
            migraphx::make_op("topk", {{"axis", -2}, {"k", 3}, {"largest", 0}}), input);
        // Unpack both elements of the (values, indices) tuple.
        auto values =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), topk);
        auto indices =
            mod->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 1}}), topk);
        mod->add_return({values, indices});
        return prog;
    }
};
......@@ -11,7 +11,8 @@ struct test_trans_abs : verify_program<test_trans_abs>
migraphx::program p;
auto* mm = p.get_main_module();
auto x = mm->add_parameter("x", migraphx::shape{migraphx::shape::float_type, {4, 3, 3, 3}});
auto tx = mm->add_instruction(migraphx::make_op("transpose", {{"dims", {0, 1, 3, 2}}}), x);
auto tx =
mm->add_instruction(migraphx::make_op("transpose", {{"permutation", {0, 1, 3, 2}}}), x);
auto absx = mm->add_instruction(migraphx::make_op("abs"), tx);
auto r = mm->add_instruction(migraphx::make_op("add"), absx, absx);
mm->add_instruction(migraphx::make_op("contiguous"), r);
......
......@@ -11,7 +11,8 @@ struct test_trans_ret : verify_program<test_trans_ret>
migraphx::program p;
auto* mm = p.get_main_module();
auto x = mm->add_parameter("x", migraphx::shape{migraphx::shape::float_type, {4, 3, 3, 3}});
auto tx = mm->add_instruction(migraphx::make_op("transpose", {{"dims", {0, 1, 3, 2}}}), x);
auto tx =
mm->add_instruction(migraphx::make_op("transpose", {{"permutation", {0, 1, 3, 2}}}), x);
mm->add_return({tx});
return p;
......
......@@ -11,7 +11,8 @@ struct test_trans_tanh : verify_program<test_trans_tanh>
migraphx::program p;
auto* mm = p.get_main_module();
auto x = mm->add_parameter("x", migraphx::shape{migraphx::shape::float_type, {4, 3, 3, 3}});
auto tx = mm->add_instruction(migraphx::make_op("transpose", {{"dims", {0, 1, 3, 2}}}), x);
auto tx =
mm->add_instruction(migraphx::make_op("transpose", {{"permutation", {0, 1, 3, 2}}}), x);
auto tanhx = mm->add_instruction(migraphx::make_op("tanh"), tx);
auto r = mm->add_instruction(migraphx::make_op("add"), tanhx, tanhx);
mm->add_instruction(migraphx::make_op("contiguous"), r);
......
......@@ -11,7 +11,8 @@ struct test_trans_tanh1 : verify_program<test_trans_tanh1>
migraphx::program p;
auto* mm = p.get_main_module();
auto x = mm->add_parameter("x", migraphx::shape{migraphx::shape::float_type, {4, 3, 3, 3}});
auto tx = mm->add_instruction(migraphx::make_op("transpose", {{"dims", {0, 1, 3, 2}}}), x);
auto tx =
mm->add_instruction(migraphx::make_op("transpose", {{"permutation", {0, 1, 3, 2}}}), x);
auto tanhx = mm->add_instruction(migraphx::make_op("tanh"), tx);
auto r = mm->add_instruction(migraphx::make_op("add"), tanhx, tanhx);
mm->add_return({tx, r});
......
......@@ -13,7 +13,7 @@ struct test_transpose : verify_program<test_transpose>
migraphx::shape s{migraphx::shape::float_type, {4, 3, 4, 4}};
auto x = mm->add_parameter("x", s);
std::vector<int64_t> perm = {0, 2, 3, 1};
auto l = mm->add_instruction(migraphx::make_op("transpose", {{"dims", perm}}), x);
auto l = mm->add_instruction(migraphx::make_op("transpose", {{"permutation", perm}}), x);
mm->add_instruction(migraphx::make_op("contiguous"), l);
return p;
}
......
......@@ -16,7 +16,7 @@ struct test_triadd2 : verify_program<test_triadd2>
auto y = mm->add_parameter("y", s);
auto z = mm->add_parameter("z", b);
auto zb = mm->add_instruction(
migraphx::make_op("broadcast", {{"axis", 1}, {"dims", s.lens()}}), z);
migraphx::make_op("broadcast", {{"axis", 1}, {"out_lens", s.lens()}}), z);
auto sum = mm->add_instruction(migraphx::make_op("add"), x, y);
mm->add_instruction(migraphx::make_op("add"), sum, zb);
return p;
......
......@@ -17,7 +17,7 @@ struct test_triadd_broadcast : verify_program<test_triadd_broadcast>
auto y = mm->add_parameter("y", {migraphx::shape::float_type, {2, 2}});
auto z = mm->add_parameter("z", {migraphx::shape::float_type, {2, 2, 3}});
auto by = mm->add_instruction(
migraphx::make_op("broadcast", {{"axis", 0}, {"dims", x->get_shape().lens()}}), y);
migraphx::make_op("broadcast", {{"axis", 0}, {"out_lens", x->get_shape().lens()}}), y);
auto sum = mm->add_instruction(migraphx::make_op("add"), x, by);
mm->add_instruction(migraphx::make_op("add"), sum, z);
return p;
......
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify the where op selecting elementwise between two same-shape
// float tensors based on a bool condition tensor.
struct test_where : verify_program<test_where>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape cond_shape{migraphx::shape::bool_type, {1, 3, 4, 5}};
        const migraphx::shape val_shape{migraphx::shape::float_type, {1, 3, 4, 5}};
        auto cond    = mod->add_parameter("b", cond_shape);
        auto if_true = mod->add_parameter("x", val_shape);
        auto if_else = mod->add_parameter("y", val_shape);
        auto result  = mod->add_instruction(migraphx::make_op("where"), cond, if_true, if_else);
        mod->add_return({result});
        return prog;
    }
};
#include "verify_program.hpp"
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/make_op.hpp>
// Verify where with scalar-like {1} value inputs that are
// multibroadcast up to the condition tensor's shape first.
struct test_where2 : verify_program<test_where2>
{
    migraphx::program create_program() const
    {
        migraphx::program prog;
        auto* mod = prog.get_main_module();
        const migraphx::shape cond_shape{migraphx::shape::bool_type, {1, 3, 4, 5}};
        const migraphx::shape val_shape{migraphx::shape::float_type, {1}};
        auto cond    = mod->add_parameter("b", cond_shape);
        auto if_true = mod->add_parameter("x", val_shape);
        auto if_else = mod->add_parameter("y", val_shape);
        // Broadcast both single-element inputs to match the condition shape.
        auto bcast_true = mod->add_instruction(
            migraphx::make_op("multibroadcast", {{"out_lens", {1, 3, 4, 5}}}), if_true);
        auto bcast_else = mod->add_instruction(
            migraphx::make_op("multibroadcast", {{"out_lens", {1, 3, 4, 5}}}), if_else);
        auto result =
            mod->add_instruction(migraphx::make_op("where"), cond, bcast_true, bcast_else);
        mod->add_return({result});
        return prog;
    }
};
......@@ -93,6 +93,11 @@ void set_default_dim_value(onnx_options& options, size_t value)
options.default_dim_value = value;
}
void set_default_loop_iterations(onnx_options& options, int64_t value)
{
options.max_loop_iterations = value;
}
void set_nhwc(tf_options& options, bool is_nhwc) { options.is_nhwc = is_nhwc; }
void set_default_dim_value(tf_options& options, size_t value) { options.batch_size = value; }
......
......@@ -256,6 +256,18 @@ argument compute_op(const T& x,
return compute_op(rank<1>{}, x, output, inputs, module_args, f);
}
template <class T, class F>
auto compute_op(rank<4>,
const T& x,
context& ctx,
const shape& output,
const std::vector<argument>& inputs,
const std::vector<module_ref>& module_args,
F f) -> decltype(x.compute(auto_any_cast(ctx), output, inputs, module_args, f))
{
return x.compute(auto_any_cast(ctx), output, inputs, module_args, f);
}
template <class T, class F>
auto compute_op(rank<3>,
const T& x,
......@@ -313,7 +325,7 @@ argument compute_op(const T& x,
const std::vector<module_ref>& module_args,
F f)
{
return compute_op(rank<3>{}, x, ctx, output, inputs, module_args, f);
return compute_op(rank<4>{}, x, ctx, output, inputs, module_args, f);
}
template <class T>
......
......@@ -8,12 +8,14 @@
#include <utility>
#include <migraphx/functional.hpp>
#include <migraphx/config.hpp>
#include <migraphx/rank.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
struct program;
struct module;
struct module_pass_manager;
#ifdef DOXYGEN
......@@ -24,6 +26,7 @@ struct pass
/// A unique name used to identify the pass
std::string name() const;
/// Run the pass on the module
void apply(module_pass_manager& mpm) const;
void apply(module& m) const;
/// Run the pass on the program
void apply(program& p) const;
......@@ -31,10 +34,34 @@ struct pass
#else
module& get_module(module_pass_manager& mpm);
namespace detail {
template <class T>
auto module_pass_manager_apply(rank<1>, const T& x, module_pass_manager& mpm)
-> decltype(x.apply(get_module(mpm)))
{
return x.apply(get_module(mpm));
}
template <class T>
void module_pass_manager_apply(rank<0>, const T&, module_pass_manager&)
{
}
template <class T>
void module_pass_manager_apply(const T& x, module_pass_manager& mpm)
{
module_pass_manager_apply(rank<1>{}, x, mpm);
}
} // namespace detail
<%
interface('pass',
virtual('name', returns='std::string', const=True),
virtual('apply', returns='void', m='module &', const=True, default='migraphx::nop'),
virtual('apply', returns='void', mpm='module_pass_manager &', const=True, default='migraphx::detail::module_pass_manager_apply'),
virtual('apply', returns='void', p='program &', const=True, default='migraphx::nop')
)
%>
......
......@@ -2,6 +2,8 @@
#
# Build MIGraphX prerequisites for docker container
set -e
#install pip3, rocm-cmake, rocblas and miopen
apt update && apt install -y python3-pip rocm-cmake rocblas miopen-hip openmp-extras
......
import os
import numpy as np
import argparse
import onnx
from onnx import numpy_helper
import migraphx
def parse_args():
    """Parse command-line options for the MIGraphX test runner.

    Returns an argparse.Namespace with:
      test_dir -- folder containing the ONNX model and its test cases
      target   -- execution target name (default 'gpu')
    """
    cli = argparse.ArgumentParser(description="MIGraphX test runner")
    cli.add_argument('test_dir',
                     type=str,
                     metavar='test_loc',
                     help='folder where the test is stored')
    cli.add_argument('--target',
                     type=str,
                     default='gpu',
                     help='Specify where the tests execute (ref, gpu)')
    return cli.parse_args()
def get_sub_folders(dir_name):
    """Return the sorted names of the immediate sub-directories of dir_name."""
    names = [
        entry for entry in os.listdir(dir_name)
        if os.path.isdir(dir_name + '/' + entry)
    ]
    return sorted(names)
def get_test_cases(dir_name):
    # Each immediate sub-folder of the test directory is one test case.
    return get_sub_folders(dir_name)
def get_model_name(dir_name):
    """Return the file name of an .onnx model inside dir_name, or '' if none."""
    for entry in os.listdir(dir_name):
        full_path = dir_name + '/' + entry
        if full_path.endswith('.onnx') and os.path.isfile(full_path):
            return entry
    return ''
def read_pb_file(filename):
    """Load an ONNX TensorProto protobuf file and return it as a numpy array."""
    with open(filename, 'rb') as pfile:
        raw_bytes = pfile.read()
    proto = onnx.TensorProto()
    proto.ParseFromString(raw_bytes)
    return numpy_helper.to_array(proto)
def wrapup_inputs(io_folder, parameter_names):
    """Load the case's input tensors and map them to parameter names.

    input_<i>.pb in io_folder is paired, in order, with the i-th name in
    parameter_names.  Returns a dict of name -> numpy array.
    """
    param_map = {}
    # enumerate replaces the original hand-maintained index counter.
    for index, param_name in enumerate(parameter_names):
        file_name = io_folder + '/input_' + str(index) + '.pb'
        param_map[param_name] = read_pb_file(file_name)
    return param_map
def read_outputs(io_folder, out_num):
    """Load output_<0..out_num-1>.pb from io_folder as a list of numpy arrays."""
    return [
        read_pb_file(io_folder + '/output_' + str(i) + '.pb')
        for i in range(out_num)
    ]
def run_one_case(model, param_map):
    """Run the compiled model on one case's inputs.

    param_map maps parameter names to numpy arrays; the model's outputs
    are converted back to numpy arrays and returned as a list.
    """
    for val in param_map.values():
        print("input = {}".format(val))
    # The original built a dict of migraphx.argument wrappers here but
    # never used it (model.run was called with the raw param_map), so
    # that dead conversion loop has been removed.
    model_outputs = model.run(param_map)
    return [np.array(output) for output in model_outputs]
def check_correctness(gold_outputs, outputs, rtol=1e-3, atol=1e-3):
    """Compare actual outputs against golden outputs with np.allclose.

    Returns True only when the output counts match and every pair agrees
    within the given tolerances.  Mismatches are printed, not raised.
    """
    if len(gold_outputs) != len(outputs):
        print("Number of outputs {} is not equal to expected number {}".format(
            len(outputs), len(gold_outputs)))
        return False
    all_match = True
    for i, (gold, actual) in enumerate(zip(gold_outputs, outputs)):
        print("Expected value: \n{}".format(gold))
        print("Actual value: \n{}".format(actual))
        if not np.allclose(gold, actual, rtol, atol):
            print("Output {} is incorrect ...".format(i))
            print("Expected value: \n{}".format(gold))
            print("Actual value: \n{}".format(actual))
            all_match = False
    return all_match
def tune_input_shape(model, input_data):
    """Find input parameters whose data shape differs from the model's.

    Returns a dict of parameter name -> actual data shape (as a list) for
    every mismatch; empty when all shapes already agree.  Every model
    parameter must be present in input_data.
    """
    mismatched = {}
    for name, param_shape in model.get_parameter_shapes().items():
        assert name in input_data
        actual_shape = list(input_data[name].shape)
        if not np.array_equal(actual_shape, param_shape.lens()):
            mismatched[name] = actual_shape
    return mismatched
def main():
    """Run every case of one ONNX test folder and print a pass/fail summary.

    Raises ValueError when any case fails so callers (e.g. CI) see a
    non-zero exit.
    """
    cli = parse_args()
    test_loc = cli.test_dir
    target = cli.target

    test_name = os.path.basename(os.path.normpath(test_loc))
    print("Running test \"{}\" on target \"{}\" ...\n".format(
        test_name, target))

    # Parse and compile the model once up front; individual cases may
    # force a recompile below when their input shapes differ.
    model_path_name = test_loc + '/' + get_model_name(test_loc)
    model = migraphx.parse_onnx(model_path_name)
    param_names = model.get_parameter_names()
    output_shapes = model.get_output_shapes()
    model.compile(migraphx.get_target(target))

    cases = get_test_cases(test_loc)
    case_num = len(cases)
    correct_num = 0
    for case_name in cases:
        io_folder = test_loc + '/' + case_name
        input_data = wrapup_inputs(io_folder, param_names)
        gold_output_data = read_outputs(io_folder, len(output_shapes))

        # If this case's input shapes differ from the compiled model's,
        # reparse with the new dims and recompile.
        input_shapes = tune_input_shape(model, input_data)
        if input_shapes:
            model = migraphx.parse_onnx(model_path_name,
                                        map_input_dims=input_shapes)
            model.compile(migraphx.get_target(target))

        output_data = run_one_case(model, input_data)
        passed = check_correctness(gold_output_data, output_data)
        if passed:
            correct_num += 1
        print("\tCase {}: {}".format(case_name,
                                     "PASSED" if passed else "FAILED"))

    print("\nTest \"{}\" has {} cases:".format(test_name, case_num))
    print("\t Passed: {}".format(correct_num))
    print("\t Failed: {}".format(case_num - correct_num))
    if case_num > correct_num:
        raise ValueError(str(case_num - correct_num) + " cases failed!")


if __name__ == "__main__":
    main()
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment