"...resnet50_tensorflow.git" did not exist on "423e109b97ae11b0b062db26510e0735950e66d8"
Commit dc7397bb authored by umangyadav's avatar umangyadav
Browse files

use fill_argument

parent 61875aee
...@@ -77,7 +77,7 @@ argument generate_argument(shape s, unsigned long seed) ...@@ -77,7 +77,7 @@ argument generate_argument(shape s, unsigned long seed)
{ {
using type = typename decltype(as)::type; using type = typename decltype(as)::type;
auto v = generate_tensor_data<type>(s, seed); auto v = generate_tensor_data<type>(s, seed);
result = {s, v}; result = {s, v};
} }
}); });
} }
......
...@@ -34,13 +34,13 @@ ...@@ -34,13 +34,13 @@
#include <migraphx/literal.hpp> #include <migraphx/literal.hpp>
#include <migraphx/instruction.hpp> #include <migraphx/instruction.hpp>
#include <migraphx/shape.hpp> #include <migraphx/shape.hpp>
#include <migraphx/verify.hpp>
#include <migraphx/make_op.hpp> #include <migraphx/make_op.hpp>
#include <migraphx/check_shapes.hpp> #include <migraphx/check_shapes.hpp>
#include <migraphx/functional.hpp> #include <migraphx/functional.hpp>
#include <basic_ops.hpp> #include <basic_ops.hpp>
#include <migraphx/compile_options.hpp> #include <migraphx/compile_options.hpp>
#include <migraphx/register_target.hpp> #include <migraphx/register_target.hpp>
#include <migraphx/generate.hpp>
#include "test.hpp" #include "test.hpp"
// check if it is custom_op or run_on_module operator // check if it is custom_op or run_on_module operator
...@@ -193,17 +193,12 @@ TEST_CASE(multitarget_compile_cpu_gpu) ...@@ -193,17 +193,12 @@ TEST_CASE(multitarget_compile_cpu_gpu)
p.compile({migraphx::make_target("gpu"), migraphx::make_target("cpu")}, {gpu_opts}); p.compile({migraphx::make_target("gpu"), migraphx::make_target("cpu")}, {gpu_opts});
EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("cpu")})); EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("cpu")}));
migraphx::parameter_map params; migraphx::parameter_map params;
std::vector<float> x_data(s.elements(), 1); params["x"] = migraphx::fill_argument(s, 1);
std::vector<float> y_data(s.elements(), 2); params["y"] = migraphx::fill_argument(s, 2);
std::vector<float> z_data(s.elements(), 3); params["z"] = migraphx::fill_argument(s, 3);
params["x"] = migraphx::argument(s, x_data.data());
params["y"] = migraphx::argument(s, y_data.data());
params["z"] = migraphx::argument(s, z_data.data());
auto result = p.eval(params).back(); auto result = p.eval(params).back();
std::vector<float> result_vector; auto gold = migraphx::fill_argument(s, 6);
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); }); EXPECT(gold == result);
std::vector<float> gold(s.elements(), 6);
EXPECT(migraphx::verify_range(gold, result_vector));
} }
TEST_CASE(single_target_multi_compile) TEST_CASE(single_target_multi_compile)
...@@ -245,14 +240,14 @@ TEST_CASE(single_target_multi_compile) ...@@ -245,14 +240,14 @@ TEST_CASE(single_target_multi_compile)
EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("ref")})); EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("ref")}));
// eval // eval
migraphx::parameter_map params; migraphx::parameter_map params;
std::vector<float> boxes_vec = {0.5, 0.5, 1.0, 1.0, 0.5, 0.6, 1.0, 1.0, 0.5, 0.4, 1.0, 1.0, std::vector<float> boxes_vec = {0.5, 0.5, 1.0, 1.0, 0.5, 0.6, 1.0, 1.0, 0.5, 0.4, 1.0, 1.0,
0.5, 10.5, 1.0, 1.0, 0.5, 10.6, 1.0, 1.0, 0.5, 100.5, 1.0, 1.0}; 0.5, 10.5, 1.0, 1.0, 0.5, 10.6, 1.0, 1.0, 0.5, 100.5, 1.0, 1.0};
params["boxes"] = migraphx::argument(boxes_s, boxes_vec.data()); params["boxes"] = migraphx::argument(boxes_s, boxes_vec.data());
auto output = p.eval(params).back(); auto output = p.eval(params).back();
std::vector<int64_t> result; std::vector<int64_t> gold_vec = {0, 0, 3, 0, 0, 0, 0, 0, 5};
output.visit([&](auto out) { result.assign(out.begin(), out.end()); }); auto gold =
std::vector<int64_t> gold = {0, 0, 3, 0, 0, 0, 0, 0, 5}; migraphx::argument(migraphx::shape{migraphx::shape::int64_type, {3, 3}}, gold_vec.data());
EXPECT(migraphx::verify_range(result, gold)); EXPECT(output == gold);
} }
TEST_CASE(multitarget_compile_if_then_else) TEST_CASE(multitarget_compile_if_then_else)
...@@ -303,18 +298,14 @@ TEST_CASE(multitarget_compile_if_then_else) ...@@ -303,18 +298,14 @@ TEST_CASE(multitarget_compile_if_then_else)
p.compile({migraphx::make_target("gpu"), migraphx::make_target("cpu")}, {gpu_opts}); p.compile({migraphx::make_target("gpu"), migraphx::make_target("cpu")}, {gpu_opts});
EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("cpu")})); EXPECT(check_compiled_program(p, {migraphx::make_target("gpu"), migraphx::make_target("cpu")}));
migraphx::parameter_map params; migraphx::parameter_map params;
std::vector<float> x_data(ds.elements(), 2); params["x"] = migraphx::fill_argument(ds, 2);
std::vector<float> y_data(ds.elements(), 3); params["y"] = migraphx::fill_argument(ds, 3);
params["x"] = migraphx::argument(ds, x_data.data());
params["y"] = migraphx::argument(ds, y_data.data());
for(bool cond_val : {true, false}) for(bool cond_val : {true, false})
{ {
params["cond"] = migraphx::argument(cond_s, &cond_val); params["cond"] = migraphx::argument(cond_s, &cond_val);
auto result = p.eval(params).back(); auto result = p.eval(params).back();
std::vector<float> result_vector; auto gold = migraphx::fill_argument(ds, (cond_val ? 3 : 6));
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); }); EXPECT(gold == result);
std::vector<float> gold(ds.elements(), (cond_val ? 3 : 6));
EXPECT(migraphx::verify_range(gold, result_vector));
} }
} }
...@@ -449,13 +440,12 @@ TEST_CASE(multitarget_compile_nested_if_then_else) ...@@ -449,13 +440,12 @@ TEST_CASE(multitarget_compile_nested_if_then_else)
// do evaluation using different conditions // do evaluation using different conditions
// TODO: make two conditional to cover all the paths // TODO: make two conditional to cover all the paths
migraphx::parameter_map params; migraphx::parameter_map params;
int x_i = 2, y_i = 3, z_i = 4; float x_i = 2.0;
std::vector<float> x_data(ds.elements(), x_i); float y_i = 3.0;
std::vector<float> y_data(ds.elements(), y_i); float z_i = 4.0;
std::vector<float> z_data(ds.elements(), z_i); params["x"] = migraphx::fill_argument(ds, x_i);
params["x"] = migraphx::argument(ds, x_data.data()); params["y"] = migraphx::fill_argument(ds, y_i);
params["y"] = migraphx::argument(ds, y_data.data()); params["z"] = migraphx::fill_argument(ds, z_i);
params["z"] = migraphx::argument(ds, z_data.data());
// cover all paths with different combination of conditions // cover all paths with different combination of conditions
std::vector<std::pair<bool, bool>> test_conds = { std::vector<std::pair<bool, bool>> test_conds = {
{true, true}, {true, false}, {false, true}, {false, false}}; {true, true}, {true, false}, {false, true}, {false, false}};
...@@ -467,21 +457,19 @@ TEST_CASE(multitarget_compile_nested_if_then_else) ...@@ -467,21 +457,19 @@ TEST_CASE(multitarget_compile_nested_if_then_else)
// main has one instruction that is : if_then_else // main has one instruction that is : if_then_else
// then mod is doing : {tmp = x+y; (cond) ? (((x-1)*y)-z) : (((tmp-1)*y)-z);} // then mod is doing : {tmp = x+y; (cond) ? (((x-1)*y)-z) : (((tmp-1)*y)-z);}
// else mod is doing : {tmp = x+z; (cond) ? (((tmp-1)*x)-y) : (((z-1)*y)-x);} // else mod is doing : {tmp = x+z; (cond) ? (((tmp-1)*x)-y) : (((z-1)*y)-x);}
int gold_i = -1; float gold_i = -1.0;
if(cond_val_0) if(cond_val_0)
{ {
int tmp_i = x_i + y_i; float tmp_i = x_i + y_i;
gold_i = (cond_val_1) ? (((x_i - 1) * y_i) - z_i) : (((tmp_i - 1) * y_i) - z_i); gold_i = (cond_val_1) ? (((x_i - 1) * y_i) - z_i) : (((tmp_i - 1) * y_i) - z_i);
} }
else else
{ {
int tmp_i = x_i + z_i; float tmp_i = x_i + z_i;
gold_i = (cond_val_1) ? (((tmp_i - 1) * x_i) - y_i) : (((z_i - 1) * y_i) - x_i); gold_i = (cond_val_1) ? (((tmp_i - 1) * x_i) - y_i) : (((z_i - 1) * y_i) - x_i);
} }
std::vector<float> result_vector; auto gold = migraphx::fill_argument(ds, gold_i);
result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); }); EXPECT(gold == result);
std::vector<float> gold(ds.elements(), gold_i);
EXPECT(migraphx::verify_range(gold, result_vector));
} }
} }
...@@ -571,26 +559,18 @@ TEST_CASE(multitarget_select_module) ...@@ -571,26 +559,18 @@ TEST_CASE(multitarget_select_module)
migraphx::make_target("ref"), migraphx::make_target("ref"),
migraphx::make_target("ref")})); migraphx::make_target("ref")}));
// program does the 12+x where x has dynamic shape {{1, 4}, {4, 4}} // program does the 12+x where x has dynamic shape {{1, 4}, {4, 4}}
float seed = 0.0f;
std::mt19937 gen(seed);
std::uniform_real_distribution<> dis(0.0, 1.0);
auto get_random_values = [&](size_t elements) {
std::vector<float> rand_samples(elements);
std::generate(rand_samples.begin(), rand_samples.end(), [&]() { return dis(gen); });
return rand_samples;
};
for(const size_t bs : {1, 2, 3, 4}) for(const size_t bs : {1, 2, 3, 4})
{ {
migraphx::shape arg_shape{migraphx::shape::float_type, {bs, 4}}; migraphx::shape arg_shape{migraphx::shape::float_type, {bs, 4}};
std::vector<float> data = get_random_values(arg_shape.elements());
migraphx::parameter_map params; migraphx::parameter_map params;
params["data"] = migraphx::argument(arg_shape, data.data()); params["data"] = migraphx::generate_argument(arg_shape, arg_shape.elements());
auto result = p.eval(params).back(); std::vector<float> input_data;
std::vector<float> result_vec; params["data"].visit([&](const auto& vec) { input_data.assign(vec.begin(), vec.end()); });
result.visit([&](auto output) { result_vec.assign(output.begin(), output.end()); }); std::transform(input_data.begin(), input_data.end(), input_data.begin(), [](const auto& i) {
std::vector<float> gold = data; return i + 12.0;
std::transform(gold.begin(), gold.end(), gold.begin(), [&](auto i) { return i + 12; }); });
EXPECT(migraphx::verify_range(gold, result_vec)); auto result = p.eval(params).back();
EXPECT(migraphx::argument(arg_shape, input_data.data()) == result);
} }
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment