Commit db09d618 authored by Alan Turner

Merge remote-tracking branch 'origin/develop' into ck-gsg

parents 4f2c6410 2b5c5f5e
...@@ -2158,6 +2158,24 @@ TEST_CASE(gathernd_test)
EXPECT(p == prog);
}
TEST_CASE(gathernd_dyn_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
auto l0 = mm->add_parameter("data",
migraphx::shape{migraphx::shape::float_type, {{2, 4, 2}, {2, 4}}});
auto l1 = mm->add_parameter("indices",
migraphx::shape{migraphx::shape::int64_type, {{1, 3}, {2, 2}}});
auto r = mm->add_instruction(migraphx::make_op("gathernd"), l0, l1);
mm->add_return({r});
migraphx::onnx_options options;
options.map_dyn_input_dims["data"] = {{2, 4, 2}, {2, 4}};
options.map_dyn_input_dims["indices"] = {{1, 3}, {2, 2}};
auto prog = migraphx::parse_onnx("gathernd_dyn_test.onnx", options);
EXPECT(p == prog);
}
TEST_CASE(gathernd_batch_dims_test)
{
migraphx::program p;
...@@ -2672,14 +2690,16 @@ TEST_CASE(if_else_test)
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
migraphx::shape s{migraphx::shape::float_type, {2, 3}};
std::vector<float> ones(s.elements(), 1.0f);
std::vector<float> rand = {1.3865, -0.494756, -0.283504, 0.200491, -0.490031, 1.32388};
auto l1 = mm->add_literal(s, ones);
auto l2 = mm->add_literal(s, rand);
auto x = mm->add_parameter("x", s);
auto y = mm->add_parameter("y", s);
auto cond = mm->add_parameter("cond", sc);
auto* then_mod = p.create_module("If_5_if");
auto rt = then_mod->add_instruction(migraphx::make_op("add"), x, l1);
...@@ -2693,15 +2713,32 @@ TEST_CASE(if_else_test)
auto r = mm->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), ret);
mm->add_return({r});
auto prog = migraphx::parse_onnx("if_else_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(if_else_test_inlined)
{
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
mm->add_literal(migraphx::literal(sc, {0}));
migraphx::shape s{migraphx::shape::float_type, {2, 3}};
std::vector<float> ones(s.elements(), 1.0f);
mm->add_literal(s, ones);
std::vector<float> rand = {0.811412, -0.949771, -0.169276, 0.36552, -0.14801, 2.07061};
auto l2 = mm->add_literal(s, rand);
mm->add_parameter("x", s);
auto y = mm->add_parameter("y", s);
auto re = mm->add_instruction(migraphx::make_op("mul"), y, l2);
mm->add_return({re});
auto prog = migraphx::parse_onnx("if_else_test_inlined.onnx");
EXPECT(p == prog);
}
...@@ -2774,6 +2811,70 @@ TEST_CASE(if_param_test)
EXPECT(p == prog);
}
TEST_CASE(if_then_else_multi_output_shapes_inlined_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
mm->add_literal(migraphx::literal(sc, {1}));
migraphx::shape s{migraphx::shape::float_type, {2, 3}};
migraphx::shape s_trail{migraphx::shape::float_type, {2, 3, 1}};
std::vector<float> ones(s.elements(), 1.0f);
auto l1 = mm->add_literal(s_trail, ones);
std::vector<float> rand = {-1.01837, -0.305541, -0.254105, 0.892955, 1.38714, -0.584205};
mm->add_literal(s, rand);
auto x = mm->add_parameter("x", s_trail);
mm->add_parameter("y", s);
auto rt = mm->add_instruction(migraphx::make_op("add"), x, l1);
auto rt2 = mm->add_instruction(migraphx::make_op("add"), x, x);
mm->add_return({rt, rt2});
auto prog = migraphx::parse_onnx("if_then_else_multi_output_shapes_inlined_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(if_then_else_multi_output_shapes_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
migraphx::shape s{migraphx::shape::float_type, {2, 3, 1}};
migraphx::shape s_trail{migraphx::shape::float_type, {2, 3, 1}};
std::vector<float> ones(s.elements(), 1.0f);
auto l1 = mm->add_literal(s_trail, ones);
std::vector<float> rand = {-0.753997, 0.707831, -0.865795, 2.49574, 0.464937, -0.168745};
auto l2 = mm->add_literal(s, rand);
auto x = mm->add_parameter("x", s_trail);
auto y = mm->add_parameter("y", s);
auto cond = mm->add_parameter("cond", sc);
auto* then_mod = p.create_module("If_5_if");
auto rt = then_mod->add_instruction(migraphx::make_op("add"), x, l1);
auto rt2 = then_mod->add_instruction(migraphx::make_op("add"), x, x);
then_mod->add_return({rt, rt2});
auto* else_mod = p.create_module("If_5_else");
auto re = else_mod->add_instruction(migraphx::make_op("mul"), y, l2);
auto re2 = else_mod->add_instruction(migraphx::make_op("sub"), y, l2);
else_mod->add_return({re, re2});
auto ret = mm->add_instruction(migraphx::make_op("if"), {cond}, {then_mod, else_mod});
auto r1 = mm->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 0}}), ret);
auto r2 = mm->add_instruction(migraphx::make_op("get_tuple_elem", {{"index", 1}}), ret);
mm->add_return({r1, r2});
auto prog = migraphx::parse_onnx("if_then_else_multi_output_shapes_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(if_pl_test)
{
migraphx::program p;
...@@ -2814,14 +2915,16 @@ TEST_CASE(if_then_test)
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
migraphx::shape s{migraphx::shape::float_type, {2, 3}};
std::vector<float> ones(s.elements(), 1.0f);
std::vector<float> rand = {-0.266913, -0.180328, -0.124268, -1.23768, 0.312334, 1.18475};
auto l1 = mm->add_literal(s, ones);
auto l2 = mm->add_literal(s, rand);
auto x = mm->add_parameter("x", s);
auto y = mm->add_parameter("y", s);
auto cond = mm->add_parameter("cond", sc);
auto* then_mod = p.create_module("If_5_if");
auto rt = then_mod->add_instruction(migraphx::make_op("add"), x, l1);
...@@ -2839,6 +2942,32 @@ TEST_CASE(if_then_test)
EXPECT(p == prog);
}
TEST_CASE(if_then_test_inlined)
{
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape sc{migraphx::shape::bool_type, {1}};
mm->add_literal(migraphx::literal(sc, {1}));
migraphx::shape s{migraphx::shape::float_type, {2, 3}};
std::vector<float> ones(s.elements(), 1.0f);
auto l1 = mm->add_literal(s, ones);
std::vector<float> rand = {-1.26487, -2.42279, 0.990835, 1.63072, 0.812238, -0.174946};
mm->add_literal(s, rand);
auto x = mm->add_parameter("x", s);
mm->add_parameter("y", s);
auto rt = mm->add_instruction(migraphx::make_op("add"), x, l1);
mm->add_return({rt});
auto prog = migraphx::parse_onnx("if_then_test_inlined.onnx");
EXPECT(p == prog);
}
TEST_CASE(if_tuple_test)
{
migraphx::program p;
...@@ -5639,53 +5768,67 @@ TEST_CASE(scatter_none_test)
TEST_CASE(scatternd_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
auto l0 = mm->add_parameter("data", migraphx::shape{migraphx::shape::float_type, {2, 2, 2}});
auto l1 = mm->add_parameter("indices", migraphx::shape{migraphx::shape::int64_type, {2, 1, 2}});
auto l2 = mm->add_parameter("updates", migraphx::shape{migraphx::shape::float_type, {2, 1, 2}});
auto r = mm->add_instruction(migraphx::make_op("scatternd_none"), l0, l1, l2);
mm->add_return({r});
auto prog = migraphx::parse_onnx("scatternd_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(scatternd_dyn_test)
{
// dynamic input.
migraphx::program p;
auto* mm = p.get_main_module();
// parameters with dynamic dimensions
auto l0 = mm->add_parameter(
"data", migraphx::shape{migraphx::shape::float_type, {{1, 3, 2}, {2, 2}, {2, 2}}});
auto l1 = mm->add_parameter(
"indices", migraphx::shape{migraphx::shape::int64_type, {{2, 1, 2}, {1, 1}, {2, 2}}});
auto l2 = mm->add_parameter(
"updates", migraphx::shape{migraphx::shape::float_type, {{2, 1, 2}, {1, 1}, {2, 2}}});
auto r = mm->add_instruction(migraphx::make_op("scatternd_none"), l0, l1, l2);
mm->add_return({r});
migraphx::onnx_options options;
options.map_dyn_input_dims["data"] = {{1, 3, 2}, {2, 2}, {2, 2}};
options.map_dyn_input_dims["indices"] = {{2, 1, 2}, {1, 1}, {2, 2}};
options.map_dyn_input_dims["updates"] = {{2, 1, 2}, {1, 1}, {2, 2}};
auto prog = migraphx::parse_onnx("scatternd_dyn_test.onnx", options);
EXPECT(p == prog);
}
TEST_CASE(scatternd_add_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
auto l0 = mm->add_parameter("data", migraphx::shape{migraphx::shape::float_type, {2, 2, 2}});
auto l1 = mm->add_parameter("indices", migraphx::shape{migraphx::shape::int64_type, {2, 1, 2}});
auto l2 = mm->add_parameter("updates", migraphx::shape{migraphx::shape::float_type, {2, 1, 2}});
auto r = mm->add_instruction(migraphx::make_op("scatternd_add"), l0, l1, l2);
mm->add_return({r});
auto prog = migraphx::parse_onnx("scatternd_add_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(scatternd_mul_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
auto l0 = mm->add_parameter("data", migraphx::shape{migraphx::shape::float_type, {2, 2, 2}});
auto l1 = mm->add_parameter("indices", migraphx::shape{migraphx::shape::int64_type, {2, 1, 2}});
auto l2 = mm->add_parameter("updates", migraphx::shape{migraphx::shape::float_type, {2, 1, 2}});
auto r = mm->add_instruction(migraphx::make_op("scatternd_mul"), l0, l1, l2);
mm->add_return({r});
auto prog = migraphx::parse_onnx("scatternd_mul_test.onnx");
EXPECT(p == prog);
}
TEST_CASE(selu_test)
...
...@@ -590,6 +590,28 @@ TEST_CASE(if_else_test)
p.compile(migraphx::ref::target{});
migraphx::shape s_data{migraphx::shape::float_type, {2, 3}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::shape bool_data{migraphx::shape::bool_type, {1}};
bool b_data = false;
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(s_data, data.data());
pp["y"] = migraphx::argument(s_data, data.data());
pp["cond"] = migraphx::argument(bool_data, &b_data);
auto result = p.eval(pp).back();
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
std::vector<float> gold = {0.0866565, -0.371067, 0.017719, 0.0250614, 0.0612539, -0.744683};
EXPECT(migraphx::verify_range(result_vector, gold));
}
TEST_CASE(if_else_test_inlined)
{
migraphx::program p = migraphx::parse_onnx("if_else_test_inlined.onnx");
p.compile(migraphx::ref::target{});
migraphx::shape s_data{migraphx::shape::float_type, {2, 3}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(s_data, data.data());
...@@ -599,8 +621,49 @@ TEST_CASE(if_else_test)
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
std::vector<float> gold = {0.0507132, -0.712328, 0.0105797, 0.04569, 0.0185013, -1.16472};
EXPECT(migraphx::verify_range(result_vector, gold));
}
TEST_CASE(if_then_test)
{
migraphx::program p = migraphx::parse_onnx("if_then_test.onnx");
p.compile(migraphx::ref::target{});
migraphx::shape s_data{migraphx::shape::float_type, {2, 3}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::shape bool_data{migraphx::shape::bool_type, {1}};
bool b_data = true;
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(s_data, data.data());
pp["y"] = migraphx::argument(s_data, data.data());
pp["cond"] = migraphx::argument(bool_data, &b_data);
auto result = p.eval(pp).back();
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
// onnx adds ones so result should be just + 1.0
std::vector<float> gold = {1.0625, 1.75, 0.9375, 1.125, 0.875, 0.4375};
EXPECT(migraphx::verify_range(result_vector, gold));
}
TEST_CASE(if_then_test_inlined)
{
migraphx::program p = migraphx::parse_onnx("if_then_test_inlined.onnx");
p.compile(migraphx::ref::target{});
migraphx::shape s_data{migraphx::shape::float_type, {2, 3}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(s_data, data.data());
pp["y"] = migraphx::argument(s_data, data.data());
auto result = p.eval(pp).back();
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
std::vector<float> gold = {1.0625, 1.75, 0.9375, 1.125, 0.875, 0.4375};
EXPECT(migraphx::verify_range(result_vector, gold));
}
...@@ -637,6 +700,67 @@ TEST_CASE(if_literal_test)
}
}
TEST_CASE(if_then_else_multi_output_shapes_inlined_test)
{
migraphx::program p =
migraphx::parse_onnx("if_then_else_multi_output_shapes_inlined_test.onnx");
p.compile(migraphx::ref::target{});
migraphx::shape x_data{migraphx::shape::float_type, {2, 3, 1}};
migraphx::shape y_data{migraphx::shape::float_type, {2, 3}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(x_data, data.data());
pp["y"] = migraphx::argument(y_data, data.data());
auto result_args = p.eval(pp);
auto result = result_args.front();
auto result_b = result_args.back();
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
std::vector<float> result_vector_back;
result_b.visit([&](auto output) { result_vector_back.assign(output.begin(), output.end()); });
result_vector.insert(result_vector.end(), result_vector_back.begin(), result_vector_back.end());
std::vector<float> gold = {
1.0625, 1.75, 0.9375, 1.125, 0.875, 0.4375, 0.125, 1.50, -0.125, 0.250, -0.250, -1.125};
EXPECT(migraphx::verify_range(result_vector, gold));
}
TEST_CASE(if_then_else_multi_output_shapes_test)
{
migraphx::program p = migraphx::parse_onnx("if_then_else_multi_output_shapes_test.onnx");
p.compile(migraphx::ref::target{});
migraphx::shape s_data{migraphx::shape::float_type, {2, 3, 1}};
std::vector<float> data = {0.0625, 0.75, -0.0625, 0.125, -0.125, -0.5625};
migraphx::shape bool_data{migraphx::shape::bool_type, {1}};
bool b_data = true;
migraphx::parameter_map pp;
pp["x"] = migraphx::argument(s_data, data.data());
pp["y"] = migraphx::argument(s_data, data.data());
pp["cond"] = migraphx::argument(bool_data, &b_data);
auto result_args = p.eval(pp);
auto result = result_args.front();
auto result_b = result_args.back();
std::vector<float> result_vector;
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
std::vector<float> result_vector_back;
result_b.visit([&](auto output) { result_vector_back.assign(output.begin(), output.end()); });
result_vector.insert(result_vector.end(), result_vector_back.begin(), result_vector_back.end());
std::vector<float> gold = {
1.0625, 1.75, 0.9375, 1.125, 0.875, 0.4375, 0.125, 1.50, -0.125, 0.250, -0.250, -1.125};
EXPECT(migraphx::verify_range(result_vector, gold));
}
TEST_CASE(if_pl_test)
{
auto run_prog = [](bool cond) {
...
...@@ -2477,27 +2477,359 @@ TEST_CASE(test_scalar_nelemnts)
throws_shape(migraphx::make_op("scalar", {{"scalar_bcst_dims", {2, 3, 4, 5}}}), input);
}
TEST_CASE(test_gathernd)
{
{
// k > r
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 4}};
migraphx::shape ds{dtype, {8}};
int batch_dims(1);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
{
// k > r - batch_dims
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 4}};
migraphx::shape ds{dtype, {2}};
int batch_dims(1);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
{
// batch_dims >= r
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 1}};
migraphx::shape ds{dtype, {2, 5, 6, 7}};
int batch_dims(3);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
{
// int(q) + r - k - batch_dims - 1 = 0 => returns a scalar
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {1}};
migraphx::shape ds{dtype, {2}};
migraphx::shape s0{dtype, {1}};
expect_shape(s0, migraphx::make_op("gathernd"), ds, is);
}
{
// See Example 4 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 2}};
migraphx::shape ds{dtype, {2, 2}};
migraphx::shape s0{dtype, {2}};
expect_shape(s0, migraphx::make_op("gathernd"), ds, is);
}
{
// See Example 5 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 1}};
migraphx::shape ds{dtype, {2, 2, 2}};
int batch_dims(1);
migraphx::shape s0{dtype, {2, 2}};
expect_shape(s0, migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
}
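The static and dynamic gathernd shape tests above and below all exercise the same rule: with r = rank(data), q = rank(indices) and k = indices.lens().back(), the output lens are indices.lens()[0:q-1] followed by data.lens()[batch_dims + k:], so the output rank is q + r - k - batch_dims - 1 (the dynamic cases apply the same rule to each {min, max} bound). A minimal standalone sketch of that rule, not MIGraphX's actual compute_shape implementation:
#include <cassert>
#include <cstddef>
#include <vector>

// Output lens for gathernd: indices.lens()[0:q-1] ++ data.lens()[batch_dims + k:].
std::vector<std::size_t> gathernd_out_lens(const std::vector<std::size_t>& data_lens,
                                           const std::vector<std::size_t>& indices_lens,
                                           std::size_t batch_dims = 0)
{
    const std::size_t r = data_lens.size();
    const std::size_t k = indices_lens.back();
    assert(batch_dims < r);      // otherwise the op is rejected
    assert(k <= r - batch_dims); // otherwise the op is rejected
    std::vector<std::size_t> out(indices_lens.begin(), indices_lens.end() - 1);
    out.insert(out.end(), data_lens.begin() + batch_dims + k, data_lens.end());
    return out; // empty lens here corresponds to the scalar case above
}

int main()
{
    // Example 4 above: data {2, 2}, indices {2, 2} -> {2}
    assert((gathernd_out_lens({2, 2}, {2, 2}) == std::vector<std::size_t>{2}));
    // Example 5 above: data {2, 2, 2}, indices {2, 1}, batch_dims = 1 -> {2, 2}
    assert((gathernd_out_lens({2, 2, 2}, {2, 1}, 1) == std::vector<std::size_t>{2, 2}));
    return 0;
}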
TEST_CASE(test_gathernd_dynamic0)
{
// k > r
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 4}};
std::vector<migraphx::shape::dynamic_dimension> b{{8, 8, 0}};
migraphx::shape ds{dtype, b};
int batch_dims(1);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic1)
{
// k > r - batch_dims
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 4}};
std::vector<migraphx::shape::dynamic_dimension> b{{2, 2, 0}};
migraphx::shape ds{dtype, b};
int batch_dims(1);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic2)
{
// batch_dims >= r
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 1}};
migraphx::shape ds{dtype, {{2, 3, 3}, {5, 6, 5}, {6, 9, 7}, {7, 8, 8}}};
int batch_dims(3);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic3)
{
// int(q) + r - k - batch_dims - 1 = 0 => returns a scalar
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {1}};
std::vector<migraphx::shape::dynamic_dimension> b{{2, 2, 0}};
migraphx::shape ds{dtype, b};
migraphx::shape::dynamic_dimension ddout{1, 1, 0};
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd"), ds, is);
}
TEST_CASE(test_gathernd_dynamic4)
{
// See Example 1 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 2}};
std::vector<migraphx::shape::dynamic_dimension> b{{2, 2, 0}, {2, 2, 0}};
migraphx::shape ds{dtype, b};
migraphx::shape::dynamic_dimension ddout{2, 2, 0};
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd"), ds, is);
}
TEST_CASE(test_gathernd_dynamic5)
{
// See Example 5 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
// index static shape, data dynamic
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 1}};
std::vector<migraphx::shape::dynamic_dimension> b{{2, 2, 0}, {2, 2, 0}, {2, 2, 0}};
migraphx::shape ds{dtype, b};
std::vector<migraphx::shape::dynamic_dimension> ddout{{2, 2, 0}, {2, 2, 0}};
int batch_dims(1);
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic6)
{
// See Example 5 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
// index dynamic shape, data static
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
std::vector<migraphx::shape::dynamic_dimension> b{{2, 3, 0}, {1, 1, 0}};
migraphx::shape is{itype, b};
migraphx::shape ds{dtype, {2, 2, 2}};
std::vector<migraphx::shape::dynamic_dimension> ddout{{2, 3, 0}, {2, 2, 0}};
int batch_dims(1);
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic6a)
{
// indices with non-fixed dynamic dimension k
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
std::vector<migraphx::shape::dynamic_dimension> b{{2, 2, 0}, {1, 3, 0}};
migraphx::shape is{itype, b};
migraphx::shape ds{dtype, {2, 2, 2}};
int batch_dims(1);
throws_shape(migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic7)
{
// See Example 5 at https://github.com/onnx/onnx/blob/main/docs/Operators.md#GatherND
// index and data both dynamic shapes
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
std::vector<migraphx::shape::dynamic_dimension> idyn{{2, 5, 0}, {1, 1, 0}};
migraphx::shape is{itype, idyn};
std::vector<migraphx::shape::dynamic_dimension> bdyn{{1, 2, 0}, {1, 2, 0}, {1, 2, 0}};
migraphx::shape ds{dtype, bdyn};
std::vector<migraphx::shape::dynamic_dimension> ddout{{2, 5, 0}, {1, 2, 0}};
int batch_dims(1);
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_gathernd_dynamic8)
{
// Same shapes as ref_ops_test gathernd_dynamic
// index static shape, data dynamic
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape is{itype, {2, 5, 1}};
std::vector<migraphx::shape::dynamic_dimension> b{{6, 7, 7}, {3, 3, 0}, {1, 4, 0}};
migraphx::shape ds{dtype, b};
std::vector<migraphx::shape::dynamic_dimension> ddout{{2, 2, 0}, {5, 5, 0}, {1, 4, 0}};
int batch_dims(1);
migraphx::shape s0{dtype, {ddout}};
expect_shape(s0, migraphx::make_op("gathernd", {{"batch_dims", batch_dims}}), ds, is);
}
TEST_CASE(test_scatternd0)
{
// good
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {4, 1}};
migraphx::shape us{dtype, {4}};
expect_shape(ds, migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd1)
{
// good, broadcasted
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {4, 1}, {4, 0}};
migraphx::shape us{dtype, {4}};
expect_shape(ds, migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd2)
{
// too many inputs
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {4, 1}};
migraphx::shape us{dtype, {4}};
migraphx::shape zs{dtype, {4}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us, zs);
}
TEST_CASE(test_scatternd3)
{
// q + r - k - 1 matches upd_lens.size(), but k > r
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {5, 4, 2}};
migraphx::shape us{dtype, {4}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd4)
{
// q + r - k - 1 != upd_lens.size()
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {4, 1}};
migraphx::shape us{dtype, {2, 2}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd5)
{
// dimensions don't match: update.lens != indices.lens[0:q-1]
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8, 3}};
migraphx::shape is{itype, {4, 1}};
migraphx::shape us{dtype, {2, 2}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn0)
{
// one dynamic input, invalid index
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {4}};
migraphx::shape is{itype, {4, 13}};
migraphx::shape::dynamic_dimension dd{4, 4, 0};
migraphx::shape us{dtype, {dd}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn1)
{
// one dynamic input
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {8}};
migraphx::shape is{itype, {4, 1}};
migraphx::shape::dynamic_dimension dd{4, 4, 0};
migraphx::shape us{dtype, {dd}};
expect_shape(ds, migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn2)
{
// one dynamic input and broadcasted data
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {2, 3, 1, 4}, {0, 1, 1, 0}};
migraphx::shape ds_std{dtype, {2, 3, 1, 4}};
migraphx::shape is{itype, {4, 4}};
migraphx::shape::dynamic_dimension dd{4, 4, 0};
migraphx::shape us{dtype, {dd}};
expect_shape(ds_std, migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn3)
{
// one dynamic input and standard, static data
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {2, 3, 1, 4}};
migraphx::shape is{itype, {4, 4}};
migraphx::shape::dynamic_dimension dd{4, 4, 0};
migraphx::shape us{dtype, {dd}};
expect_shape(ds, migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn4)
{
// index is dynamic with last dimension not fixed
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {2, 3, 1, 4}};
migraphx::shape::dynamic_dimension dd{4, 5, 0};
migraphx::shape is{itype, {dd, dd}};
migraphx::shape us{dtype, {dd}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
TEST_CASE(test_scatternd_dyn5)
{
// dimensions don't match: update.lens != indices.lens[0:q-1]
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape ds{dtype, {2, 3, 1, 4}};
migraphx::shape::dynamic_dimension dd{4, 4, 0};
migraphx::shape::dynamic_dimension dbad{2, 3, 0};
migraphx::shape is{itype, {dd, dd}};
migraphx::shape us{dtype, {dbad}};
throws_shape(migraphx::make_op("scatternd_none"), ds, is, us);
}
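The scatternd shape tests above check the complementary constraint: updates.lens() must equal indices.lens()[0:q-1] followed by data.lens()[k:], so rank(updates) = q + r - k - 1. A small standalone sketch of that check, not MIGraphX's actual implementation:
#include <cassert>
#include <cstddef>
#include <vector>

// updates.lens() must equal indices.lens()[0:q-1] ++ data.lens()[k:].
bool scatternd_updates_ok(const std::vector<std::size_t>& data_lens,
                          const std::vector<std::size_t>& indices_lens,
                          const std::vector<std::size_t>& updates_lens)
{
    const std::size_t r = data_lens.size();
    const std::size_t k = indices_lens.back();
    if(k > r)
        return false; // rejected, as in test_scatternd3 above
    std::vector<std::size_t> expected(indices_lens.begin(), indices_lens.end() - 1);
    expected.insert(expected.end(), data_lens.begin() + k, data_lens.end());
    return updates_lens == expected;
}

int main()
{
    assert(scatternd_updates_ok({8}, {4, 1}, {4}));     // test_scatternd0 above
    assert(!scatternd_updates_ok({8}, {4, 1}, {2, 2})); // test_scatternd4 above
    return 0;
}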
TEST_CASE(test_squeeze)
...
...@@ -2746,6 +2746,187 @@ TEST_CASE(gathernd_test)
}
}
TEST_CASE(gathernd_dynamic0)
{
// dynamic data, all dimensions fixed
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape ds{migraphx::shape::float_type, {{2, 2, 2}, {3, 3, 0}, {1, 1, 0}}};
migraphx::shape is{migraphx::shape::int64_type, {2, 2, 1}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
auto gathernd_op = migraphx::make_op("gathernd");
auto gathernd = mm->add_instruction(gathernd_op, xdata, xindex);
mm->add_return({gathernd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {2, 3, 1}}; // data
migraphx::shape input_fixed_shape1{migraphx::shape::int64_type, {2, 2, 1}}; // index
std::vector<float> data_vec(2 * 3 * 1);
std::iota(data_vec.begin(), data_vec.end(), 0);
std::vector<int64_t> indices_vec{1, 0, 0, 1};
params["X"] = migraphx::argument(input_fixed_shape0, data_vec.data());
params["I"] = migraphx::argument(input_fixed_shape1, indices_vec.data());
auto result = p.eval(params).back();
std::vector<float> res_data{};
std::vector<float> gold{3, 4, 5, 0, 1, 2, 0, 1, 2, 3, 4, 5};
result.visit([&](auto output) { res_data.assign(output.begin(), output.end()); });
EXPECT(migraphx::verify_range(res_data, gold));
}
TEST_CASE(gathernd_dynamic1)
{
// dynamic data, dims not fixed
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape ds{migraphx::shape::float_type, {{2, 5, 2}, {1, 5, 0}, {1, 5, 0}}};
migraphx::shape is{migraphx::shape::int64_type, {2, 2, 1}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
auto gathernd_op = migraphx::make_op("gathernd");
auto gathernd = mm->add_instruction(gathernd_op, xdata, xindex);
mm->add_return({gathernd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {2, 3, 1}}; // data
migraphx::shape input_fixed_shape1{migraphx::shape::int64_type, {2, 2, 1}}; // index
std::vector<float> data_vec(2 * 3 * 1);
std::iota(data_vec.begin(), data_vec.end(), 0);
std::vector<int64_t> indices_vec{1, 0, 0, 1};
params["X"] = migraphx::argument(input_fixed_shape0, data_vec.data());
params["I"] = migraphx::argument(input_fixed_shape1, indices_vec.data());
auto result = p.eval(params).back();
std::vector<float> res_data{};
std::vector<float> gold{3, 4, 5, 0, 1, 2, 0, 1, 2, 3, 4, 5};
result.visit([&](auto output) { res_data.assign(output.begin(), output.end()); });
EXPECT(migraphx::verify_range(res_data, gold));
}
TEST_CASE(gathernd_dynamic2)
{
// dynamic both index and data
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape ds{migraphx::shape::float_type, {{2, 5, 2}, {1, 5, 0}, {1, 5, 0}}};
migraphx::shape is{migraphx::shape::int64_type, {{2, 5, 3}, {2, 3, 3}, {1, 1}}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
auto gathernd_op = migraphx::make_op("gathernd");
auto gathernd = mm->add_instruction(gathernd_op, xdata, xindex);
mm->add_return({gathernd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {2, 3, 1}}; // data
migraphx::shape input_fixed_shape1{migraphx::shape::int64_type, {2, 2, 1}}; // index
std::vector<float> data_vec(2 * 3 * 1);
std::iota(data_vec.begin(), data_vec.end(), 0);
std::vector<int64_t> indices_vec{1, 0, 0, 1};
params["X"] = migraphx::argument(input_fixed_shape0, data_vec.data());
params["I"] = migraphx::argument(input_fixed_shape1, indices_vec.data());
auto result = p.eval(params).back();
std::vector<float> res_data{};
std::vector<float> gold{3, 4, 5, 0, 1, 2, 0, 1, 2, 3, 4, 5};
result.visit([&](auto output) { res_data.assign(output.begin(), output.end()); });
EXPECT(migraphx::verify_range(res_data, gold));
}
TEST_CASE(gathernd_dynamic3)
{
// dynamic index, static data and a batch_dims input
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape ds{migraphx::shape::float_type, {2, 3, 1}};
migraphx::shape is{migraphx::shape::int64_type, {{2, 5, 3}, {2, 3, 3}, {1, 1}}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
int batch_dims{1};
auto gathernd_op = migraphx::make_op("gathernd", {{"batch_dims", batch_dims}});
auto gathernd = mm->add_instruction(gathernd_op, xdata, xindex);
mm->add_return({gathernd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {2, 3, 1}}; // data
migraphx::shape input_fixed_shape1{migraphx::shape::int64_type, {2, 2, 1}}; // index
std::vector<float> data_vec(2 * 3 * 1);
std::iota(data_vec.begin(), data_vec.end(), 0);
std::vector<int64_t> indices_vec{1, 0, 0, 1};
params["X"] = migraphx::argument(input_fixed_shape0, data_vec.data());
params["I"] = migraphx::argument(input_fixed_shape1, indices_vec.data());
auto result = p.eval(params).back();
std::vector<float> res_data{};
std::vector<float> gold{1, 0, 3, 4};
result.visit([&](auto output) { res_data.assign(output.begin(), output.end()); });
EXPECT(migraphx::verify_range(res_data, gold));
}
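For gathernd_dynamic3 above, the gold value {1, 0, 3, 4} follows from the batch_dims = 1 indexing rule: each batch row of indices selects rows from the matching batch of data. A standalone sketch of that arithmetic, not the reference implementation the test runs:
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

int main()
{
    // data lens {2, 3, 1} filled with 0..5, indices lens {2, 2, 1}, batch_dims = 1:
    // out[b][j] = data[b][indices[b][j][0]], which gives the gold {1, 0, 3, 4}.
    const std::vector<float> data           = {0, 1, 2, 3, 4, 5};
    const std::vector<std::int64_t> indices = {1, 0, 0, 1};
    std::vector<float> out;
    for(std::size_t b = 0; b < 2; ++b)
        for(std::size_t j = 0; j < 2; ++j)
            out.push_back(data[b * 3 + static_cast<std::size_t>(indices[b * 2 + j])]);
    assert((out == std::vector<float>{1, 0, 3, 4}));
    return 0;
}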
TEST_CASE(gathernd_dynamic4)
{
// int(q) + r - k - batch_dims - 1 = 0 => returns a scalar
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape ds{migraphx::shape::float_type,
{migraphx::shape::dynamic_dimension({2, 2, 0})}};
migraphx::shape is{migraphx::shape::int64_type, {1}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
auto gathernd_op = migraphx::make_op("gathernd");
auto gathernd = mm->add_instruction(gathernd_op, xdata, xindex);
mm->add_return({gathernd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {2}}; // data
migraphx::shape input_fixed_shape1{migraphx::shape::int64_type, {1}}; // index
std::vector<float> data_vec(2);
std::iota(data_vec.begin(), data_vec.end(), 4);
std::vector<int64_t> indices_vec{1};
params["X"] = migraphx::argument(input_fixed_shape0, data_vec.data());
params["I"] = migraphx::argument(input_fixed_shape1, indices_vec.data());
auto result = p.eval(params).back();
std::vector<float> res_data{};
std::vector<float> gold{5};
result.visit([&](auto output) { res_data.assign(output.begin(), output.end()); });
EXPECT(migraphx::verify_range(res_data, gold));
}
TEST_CASE(gathernd_negative_index_test)
{
{
...@@ -7061,6 +7242,51 @@ TEST_CASE(scatternd_reduction_test)
}
}
TEST_CASE(scatternd_reduction_dyn_test)
{
// reduction = add, with dynamic input shapes
migraphx::program p;
auto* mm = p.get_main_module();
auto dtype = migraphx::shape::float_type;
auto itype = migraphx::shape::int64_type;
migraphx::shape::dynamic_dimension dd{3, 6, 0};
migraphx::shape ds{migraphx::shape::float_type, {dd, dd, dd}};
migraphx::shape is{itype, {2, 1}};
migraphx::shape us{dtype, {{2, 2, 0}, dd, dd}};
auto xdata = mm->add_parameter("X", ds);
auto xindex = mm->add_parameter("I", is);
auto xupdates = mm->add_parameter("U", us);
auto scatternd_add_op = migraphx::make_op("scatternd_add");
auto scatternd = mm->add_instruction(scatternd_add_op, xdata, xindex, xupdates);
mm->add_return({scatternd});
p.compile(migraphx::ref::target{});
migraphx::parameter_map params;
migraphx::shape input_fixed_shape0{migraphx::shape::float_type, {4, 4, 4}}; // data
std::vector<float> input_data{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1, 1, 2, 3, 4, 5, 6,
7, 8, 8, 7, 6, 5, 4, 3, 2, 1, 8, 7, 6, 5, 4, 3, 2, 1, 1, 2, 3, 4,
5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1, 1, 2, 3, 4, 5, 6, 7, 8};
std::vector<uint64_t> input_index{0, 2};
migraphx::shape input_fixed_shape1{migraphx::shape::float_type, {2, 4, 4}}; // updates
std::vector<float> input_updates{5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8,
1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4};
params["X"] = migraphx::argument(input_fixed_shape0, input_data.data());
params["I"] = migraphx::argument(is, input_index.data());
params["U"] = migraphx::argument(input_fixed_shape1, input_updates.data());
auto result = p.eval(params).back();
std::vector<float> results_vector;
result.visit([&](auto output) { results_vector.assign(output.begin(), output.end()); });
std::vector<float> gold{6, 7, 8, 9, 11, 12, 13, 14, 15, 14, 13, 12, 12, 11, 10, 9,
1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1,
9, 8, 7, 6, 6, 5, 4, 3, 4, 5, 6, 7, 9, 10, 11, 12,
8, 7, 6, 5, 4, 3, 2, 1, 1, 2, 3, 4, 5, 6, 7, 8};
EXPECT(migraphx::verify_range(results_vector, gold));
}
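The gold tensor in scatternd_reduction_dyn_test above comes from adding each update slice elementwise into the data slice selected by its index (indices {0, 2} touch data slices 0 and 2; untouched slices pass through unchanged). A reduced standalone sketch of that accumulation, not MIGraphX's scatternd_add kernel:
#include <cassert>
#include <cstddef>
#include <vector>

// Add update slice i into data slice indices[i]; "slice" is the element count of
// one slice (4 x 4 = 16 in the test above, 2 in this reduced example).
void scatternd_add_slices(std::vector<float>& data,
                          const std::vector<std::size_t>& indices,
                          const std::vector<float>& updates,
                          std::size_t slice)
{
    for(std::size_t i = 0; i < indices.size(); ++i)
        for(std::size_t j = 0; j < slice; ++j)
            data[indices[i] * slice + j] += updates[i * slice + j];
}

int main()
{
    std::vector<float> data = {1, 2, 3, 4, 5, 6}; // three slices of two elements
    scatternd_add_slices(data, {0, 2}, {5, 5, 1, 1}, 2);
    assert((data == std::vector<float>{6, 7, 3, 4, 6, 7}));
    return 0;
}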
TEST_CASE(sigmoid_test)
{
migraphx::program p;
...
...@@ -25,6 +25,7 @@ import argparse
import numpy as np
import migraphx
import onnxruntime as ort
import sys
def parse_args():
...@@ -33,15 +34,13 @@ def parse_args():
'MIGraphX accuracy checker. Use to verify onnx files to ensure MIGraphX\'s output \
is within tolerance of onnx runtime\'s expected output.'
)
file_args = parser.add_argument_group(title='file type arguments')
file_args.add_argument('--onnx', type=str, help='path to onnx file')
file_args.add_argument('--tf', type=str, help='path to tf pb file')
parser.add_argument('--provider',
type=str,
default='CPUExecutionProvider',
help='execution provider for onnx runtime \
(default = CPUExecutionProvider)')
parser.add_argument('--batch',
type=int,
...@@ -50,6 +49,9 @@ def parse_args():
parser.add_argument('--fill1',
action='store_true',
help='fill all arguments with a value of 1')
parser.add_argument('--fill0',
action='store_true',
help='fill all arguments with a value of 0')
parser.add_argument('--verbose',
action='store_true',
help='show verbose information (for debugging)')
...@@ -57,6 +59,12 @@ def parse_args():
type=float,
default=1e-3,
help='accuracy tolerance (default = 1e-3)')
parser.add_argument('--input-dim',
type=str,
action='append',
help='specify input parameter dimension \
with the following format --input-dim input_name:dim0,dim1,dim2...'
)
args = parser.parse_args()
return args
...@@ -111,42 +119,127 @@ def main():
def main():
args = parse_args()
use_onnx = True
if args.onnx == None:
use_onnx = False
if not use_onnx and args.tf == None:
print('Error: please specify either an onnx or tf pb file')
sys.exit(-1)
model_name = args.onnx
batch = args.batch
custom_inputs = args.input_dim
input_dims = {}
if custom_inputs != None:
for input in custom_inputs:
input_dim = ''.join(input.split(':')[:-1])
dims = [int(dim) for dim in input.split(':')[-1].split(',')]
input_dims[input_dim] = dims
if use_onnx:
if not input_dims:
model = migraphx.parse_onnx(model_name, default_dim_value=batch)
else:
model = migraphx.parse_onnx(model_name,
default_dim_value=batch,
map_input_dims=input_dims)
else:
model_name = args.tf
if not input_dims:
model = migraphx.parse_tf(model_name, batch_size=batch)
else:
model = migraphx.parse_tf(model_name,
batch_size=batch,
map_input_dims=input_dims)
if args.verbose:
print(model)
model.compile(migraphx.get_target('gpu'))
params = {}
test_inputs = {}
for name, shape in model.get_parameter_shapes().items():
if args.verbose:
print(f'Parameter {name} -> {shape}')
in_shape = shape.lens()
in_type = shape.type_string()
if not args.fill1 and not args.fill0:
test_input = np.random.rand(*(in_shape)).astype(
get_np_datatype(in_type))
elif not args.fill0:
test_input = np.ones(in_shape).astype(get_np_datatype(in_type))
else:
test_input = np.zeros(in_shape).astype(get_np_datatype(in_type))
test_inputs[name] = test_input
params[name] = migraphx.argument(test_input)
pred_migx = np.array(model.run(params)[-1])
if use_onnx:
sess = ort.InferenceSession(model_name, providers=[args.provider])
ort_params = {}
for input in sess.get_inputs():
ort_params[input.name] = test_inputs[input.name]
try:
pred_fw = sess.run(None, ort_params)[-1]
except Exception as e:
if any(input_dims):
print(
'Error: custom input dim may not be compatible with onnx runtime'
)
raise e
else:
import tensorflow as tf
def load_tf_graph(model_name):
with tf.io.gfile.GFile(model_name, 'rb') as f:
graph_def = tf.compat.v1.GraphDef()
graph_def.ParseFromString(f.read())
with tf.compat.v1.Graph().as_default() as graph:
tf.graph_util.import_graph_def(graph_def)
return graph
graph = load_tf_graph(model_name)
is_nhwc = False
graph_ops = []
for op in graph.get_operations():
graph_ops.append(op.name)
if 'Conv' in op.node_def.op:
if 'NHWC' in op.get_attr('data_format').decode('utf-8'):
is_nhwc = True
graph_ops_set = set(graph_ops)
tf_dict = {}
for name in test_inputs.keys():
# graph.get_operations() adds 'import/' to the op name
tf_name = f'import/{name}'
if tf_name not in graph_ops_set:
continue
x = graph.get_tensor_by_name(f'{tf_name}:0')
tf_input = test_inputs[name]
# transpose input for NHWC model
if tf_input.ndim == 4 and is_nhwc:
tf_dict[x] = np.transpose(tf_input, (0, 2, 3, 1))
else:
tf_dict[x] = tf_input
# assume last node in graph is output
# TODO: let user specify op name for output
y = graph.get_tensor_by_name(f'{graph_ops[-1]}:0')
with tf.compat.v1.Session(graph=graph) as sess:
y_out = sess.run(y, feed_dict=tf_dict)
pred_fw = y_out
is_correct = check_correctness(pred_fw, pred_migx, args.tolerance,
args.tolerance, args.verbose)
verbose_string = ' Rerun with --verbose for detailed information.' \
if not args.verbose else ''
...