Commit 19ea0bf9 authored by Paul

Merge branch 'jit-vector-reduce' into jit-vector-softmax

parents c4cd8b0a 85897b5a
......@@ -3178,6 +3178,20 @@ def mean_test():
    return ([node], data, [mean])


@onnx_test
def mean_integral_test():
    data = [
        helper.make_tensor_value_info(str(i), TensorProto.INT32, [2, 2, 2])
        for i in range(10)
    ]
    data_names = [str(i) for i in range(10)]
    mean = helper.make_tensor_value_info('mean', TensorProto.INT32, [2, 2, 2])
    node = onnx.helper.make_node("Mean", inputs=data_names, outputs=["mean"])

    return ([node], data, [mean])


@onnx_test
def min_test():
    a = helper.make_tensor_value_info('0', TensorProto.FLOAT, [3])
......
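The fixture above only declares the node and its value_infos; the repo's generator presumably serializes them into the binary mean_integral_test.onnx summarized below. A standalone sketch of roughly that serialization, assuming plain onnx helpers rather than the @onnx_test harness (opset and other model details may differ from the checked-in file):

import onnx
from onnx import TensorProto, helper

data = [helper.make_tensor_value_info(str(i), TensorProto.INT32, [2, 2, 2])
        for i in range(10)]
mean = helper.make_tensor_value_info('mean', TensorProto.INT32, [2, 2, 2])
node = helper.make_node("Mean", inputs=[str(i) for i in range(10)], outputs=["mean"])

graph = helper.make_graph([node], "mean_integral_test", data, [mean])
onnx.save(helper.make_model(graph), "mean_integral_test.onnx")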
[Binary file mean_integral_test.onnx added: serialized ONNX graph "mean_integral_test" with a single "Mean" node over int32 inputs "0" through "9" of shape [2, 2, 2], producing output "mean".]
......@@ -2890,6 +2890,30 @@ TEST_CASE(mean_test)
    EXPECT(p == prog);
}

TEST_CASE(mean_integral_test)
{
    const std::size_t num_data = 10;
    migraphx::program p;
    auto* mm = p.get_main_module();
    migraphx::shape s{migraphx::shape::int32_type, {2, 2, 2}};
    // Mean over integral inputs is expected to lower to a chain of adds...
    auto mean = mm->add_parameter("0", s);
    for(std::size_t i = 1; i < num_data; ++i)
    {
        auto data = mm->add_parameter(std::to_string(i), s);
        mean      = mm->add_instruction(migraphx::make_op("add"), mean, data);
    }
    // ...followed by an integer division by the input count, broadcast to the output shape.
    auto div_lit = mm->add_literal(migraphx::literal{migraphx::shape{s.type()}, {num_data}});
    auto divisor =
        mm->add_instruction(migraphx::make_op("multibroadcast", {{"out_lens", s.lens()}}), div_lit);
    mean = mm->add_instruction(migraphx::make_op("div"), mean, divisor);

    auto prog = optimize_onnx("mean_integral_test.onnx");

    EXPECT(p == prog);
}

TEST_CASE(min_test)
{
    migraphx::program p;
......
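The expected program above amounts to summing the ten int32 inputs and dividing elementwise by a broadcast count. A rough numpy illustration (not MIGraphX code; it assumes the int32 div truncates toward zero, which is also what the ref test's gold value below relies on):

import numpy as np

num_data = 10
inputs = [np.full((2, 2, 2), v, dtype=np.int32) for v in range(num_data)]

acc = inputs[0]
for d in inputs[1:]:
    acc = acc + d                                          # the chain of "add" instructions

divisor = np.broadcast_to(np.int32(num_data), acc.shape)   # the multibroadcast scalar literal
mean = np.trunc(acc / divisor).astype(np.int32)            # truncating division, as for the int32 "div"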
......@@ -581,6 +581,33 @@ TEST_CASE(mean_test)
    EXPECT(migraphx::verify_range(result_vector, gold));
}

TEST_CASE(mean_integral_test)
{
    migraphx::program p = migraphx::parse_onnx("mean_integral_test.onnx");
    p.compile(migraphx::ref::target{});

    migraphx::shape s{migraphx::shape::int32_type, {2, 2, 2}};
    const int num_elms = 8;
    const int num_data = 10;
    const std::vector<int> scalars{1, 5, 14, 2, 6, 21, 101, 0, -4, -11};
    std::vector<std::vector<int>> data;
    std::transform(scalars.begin(), scalars.end(), std::back_inserter(data), [&](const auto i) {
        return std::vector<int>(num_elms, i);
    });

    migraphx::parameter_map pp;
    for(std::size_t i = 0; i < num_data; ++i)
        pp[std::to_string(i)] = migraphx::argument(s, data[i].data());

    auto result = p.eval(pp).back();
    std::vector<double> result_vector;
    result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });

    const auto mean = std::accumulate(scalars.begin(), scalars.end(), 0) / num_data;
    std::vector<int> gold(num_elms, mean);
    EXPECT(migraphx::verify_range(result_vector, gold));
}

TEST_CASE(nonzero_test)
{
    migraphx::program p = migraphx::parse_onnx("nonzero_dynamic_test.onnx");
......
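For reference, the gold value filled in above works out to 13: the scalars sum to 135 and the integer division by num_data truncates. Restated as plain Python:

scalars = [1, 5, 14, 2, 6, 21, 101, 0, -4, -11]
total = sum(scalars)               # 135
mean = int(total / len(scalars))   # 13, matching the truncating integer division in the test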
......@@ -109,6 +109,29 @@ TEST_CASE(transposed1)
    EXPECT(eshapes == rshapes);
}

TEST_CASE(non_packed_empty1)
{
    std::vector<migraphx::shape> ishapes = {make_shape({1, 12}, {589824, 64})};
    std::vector<migraphx::shape> eshapes = {make_shape({12}, {64})};
    auto rshapes = migraphx::reduce_dims(ishapes);
    EXPECT(eshapes == rshapes);
}

TEST_CASE(non_packed_empty2)
{
    std::vector<migraphx::shape> ishapes = {make_shape({12, 1}, {64, 589824})};
    std::vector<migraphx::shape> eshapes = {make_shape({12}, {64})};
    auto rshapes = migraphx::reduce_dims(ishapes);
    EXPECT(eshapes == rshapes);
}

TEST_CASE(single_dim)
{
    std::vector<migraphx::shape> ishapes = {make_shape({1}, {1})};
    auto rshapes = migraphx::reduce_dims(ishapes);
    EXPECT(ishapes == rshapes);
}

TEST_CASE(empty)
{
    auto rshapes = migraphx::reduce_dims({});
......
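The two non_packed_empty cases pin down that reduce_dims drops a size-1 dimension even when its stride is arbitrary (589824 here), while the single_dim case comes back unchanged. A toy sketch of just that unit-dimension rule, in Python for brevity (not MIGraphX's actual reduce_dims, which also merges contiguous dimensions consistently across all input shapes):

def drop_unit_dims(lens, strides):
    # Keep only dimensions of extent > 1; their strides carry over unchanged.
    kept = [(l, s) for l, s in zip(lens, strides) if l != 1]
    if not kept:  # e.g. lens == [1]: nothing left to keep, so leave the shape alone
        return list(lens), list(strides)
    return [l for l, _ in kept], [s for _, s in kept]

print(drop_unit_dims([1, 12], [589824, 64]))   # ([12], [64])
print(drop_unit_dims([12, 1], [64, 589824]))   # ([12], [64])
print(drop_unit_dims([1], [1]))                # ([1], [1]), as in single_dim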