".circleci/git@developer.sourcefind.cn:OpenDAS/torchaudio.git" did not exist on "70f429a419dc0bab700b576beaf1f6f0b02da40a"
Unverified commit ccd08b4c authored by turneram, committed by GitHub

Add remaining random ops for Barracuda models (#963)

Add RandomNormal, RandomNormalLike, RandomUniform, and RandomUniformLike to onnx parser and onnx tests

Each Random*/Random*Like pair is implemented with a single op_parser because the two ops share the same essential attributes and generation algorithm; the difference is that Random*Like gets the output type and/or shape from an input argument, while Random* takes both from attributes (see the sketch below).

Resolves #907
Resolves #959
parent 87b2fe35
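To make the shared dispatch concrete, here is a minimal standalone sketch of the deduction rule both parsers apply. The types and helper names are illustrative only, not MIGraphX code:

#include <cstdio>
#include <optional>
#include <stdexcept>
#include <vector>

struct tensor_desc
{
    int dtype;             // ONNX TensorProto type code (1 = float, 10 = half, 11 = double)
    std::vector<int> lens; // dimensions
};

// Deduce the output type/shape the way the shared parser does: attr_dtype and
// attr_shape model the node attributes; input models the single argument of a
// Random*Like node.
tensor_desc deduce_output(std::optional<int> attr_dtype,
                          std::optional<std::vector<int>> attr_shape,
                          std::optional<tensor_desc> input)
{
    if(attr_shape) // Random*: type and shape both come from attributes
        return {attr_dtype.value_or(1), *attr_shape};
    if(input)      // Random*Like: copy the input's type/shape; dtype overrides the type
        return {attr_dtype ? *attr_dtype : input->dtype, input->lens};
    throw std::runtime_error("cannot deduce shape without shape attribute or argument");
}

int main()
{
    auto a = deduce_output(11, std::vector<int>{2, 3, 4}, std::nullopt);          // Random*
    auto b = deduce_output(std::nullopt, std::nullopt, tensor_desc{1, {2, 3, 4}}); // Random*Like
    std::printf("dtype from attribute: %d, dtype from input: %d\n", a.dtype, b.dtype);
    return 0;
}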
#include <migraphx/onnx/op_parser.hpp>
#include <migraphx/ranges.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/onnx/checks.hpp>
#include <algorithm>
#include <chrono>
#include <random>
#include <set>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
namespace onnx {

struct parse_randomnormal_ops : op_parser<parse_randomnormal_ops>
{
    const std::set<shape::type_t> valid_types = {
        shape::float_type, shape::half_type, shape::double_type};

    std::vector<op_desc> operators() const { return {{"RandomNormal"}, {"RandomNormalLike"}}; }

    instruction_ref parse(const op_desc& opd,
                          const onnx_parser& parser,
                          const onnx_parser::node_info& info,
                          std::vector<instruction_ref> args) const
    {
        int dtype      = 1;
        bool use_dtype = false;
        if(contains(info.attributes, "dtype"))
        {
            dtype     = info.attributes.at("dtype").i();
            use_dtype = true;
        }
        shape::type_t out_type = get_type(dtype);
        if(not contains(valid_types, out_type))
            MIGRAPHX_THROW(opd.op_name + ": invalid output type: " + std::to_string(dtype) +
                           ". Valid types are 1 (float), 10 (half), and 11 (double).");

        float mean = 0.0;
        if(contains(info.attributes, "mean"))
            mean = info.attributes.at("mean").f();

        float scale = 1.0;
        if(contains(info.attributes, "scale"))
            scale = info.attributes.at("scale").f();

        // Default to a time-based seed when the attribute is absent
        float seed = static_cast<float>(
            std::chrono::high_resolution_clock::now().time_since_epoch().count());
        if(contains(info.attributes, "seed"))
            seed = info.attributes.at("seed").f();

        shape out_shape;
        if(contains(info.attributes, "shape"))
        {
            // RandomNormal:
            // output type and shape must come from attributes
            std::vector<int> out_lens;
            literal ls = parser.parse_value(info.attributes.at("shape"));
            ls.visit([&](auto s) { out_lens.assign(s.begin(), s.end()); });
            out_shape = shape{out_type, out_lens};
        }
        else if(args.size() == 1)
        {
            // RandomNormalLike:
            // output type and shape are the same as the input's by default;
            // dtype is used instead when the attribute is set
            if(not contains(valid_types, args[0]->get_shape().type()))
                MIGRAPHX_THROW(opd.op_name + ": invalid output type: " +
                               std::to_string(args[0]->get_shape().type()) +
                               ". Valid types are float, half, and double.");
            out_shape =
                use_dtype ? shape{out_type, args[0]->get_shape().lens()} : args[0]->get_shape();
        }
        else
        {
            MIGRAPHX_THROW(opd.op_name +
                           ": cannot deduce shape without shape attribute or argument.");
        }

        // Generate the values at parse time and bake them into the program as a literal
        std::mt19937 gen(seed);
        std::normal_distribution<> d(mean, scale);
        std::vector<double> rand_vals(out_shape.elements());
        std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
        return info.add_literal(literal{out_shape, rand_vals});
    }
};

} // namespace onnx
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
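A note on the design choice in both parsers: the random values are generated once at parse time and stored in the program as a literal, so a compiled program returns the same tensor on every evaluation; fresh values require re-parsing (and, when no seed attribute is given, the time-based default seed). A minimal usage sketch, assuming this tree's C++ API (migraphx::parse_onnx and the ref target) and a hypothetical model file random.onnx containing one of these nodes:

#include <migraphx/onnx.hpp>
#include <migraphx/program.hpp>
#include <migraphx/ref/target.hpp>

int main()
{
    // Parsing runs the generator and bakes the values into a literal
    auto prog = migraphx::parse_onnx("random.onnx"); // hypothetical model file
    prog.compile(migraphx::ref::target{});
    auto out1 = prog.eval({}); // identical results on every eval,
    auto out2 = prog.eval({}); // since the literal is fixed at parse time
    return 0;
}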
#include <migraphx/onnx/op_parser.hpp>
#include <migraphx/ranges.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/onnx/checks.hpp>
#include <algorithm>
#include <chrono>
#include <random>
#include <set>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
namespace onnx {

struct parse_randomuniform_ops : op_parser<parse_randomuniform_ops>
{
    const std::set<shape::type_t> valid_types = {
        shape::float_type, shape::half_type, shape::double_type};

    std::vector<op_desc> operators() const { return {{"RandomUniform"}, {"RandomUniformLike"}}; }

    instruction_ref parse(const op_desc& opd,
                          const onnx_parser& parser,
                          const onnx_parser::node_info& info,
                          std::vector<instruction_ref> args) const
    {
        int dtype      = 1;
        bool use_dtype = false;
        if(contains(info.attributes, "dtype"))
        {
            dtype     = info.attributes.at("dtype").i();
            use_dtype = true;
        }
        shape::type_t out_type = get_type(dtype);
        if(not contains(valid_types, out_type))
            MIGRAPHX_THROW(opd.op_name + ": invalid output type: " + std::to_string(dtype) +
                           ". Valid types are 1 (float), 10 (half), and 11 (double).");

        float high = 1.0;
        if(contains(info.attributes, "high"))
            high = info.attributes.at("high").f();

        float low = 0.0;
        if(contains(info.attributes, "low"))
            low = info.attributes.at("low").f();

        // Default to a time-based seed when the attribute is absent
        float seed = static_cast<float>(
            std::chrono::high_resolution_clock::now().time_since_epoch().count());
        if(contains(info.attributes, "seed"))
            seed = info.attributes.at("seed").f();

        shape out_shape;
        if(contains(info.attributes, "shape"))
        {
            // RandomUniform:
            // output type and shape must come from attributes
            std::vector<int> out_lens;
            literal ls = parser.parse_value(info.attributes.at("shape"));
            ls.visit([&](auto s) { out_lens.assign(s.begin(), s.end()); });
            out_shape = shape{out_type, out_lens};
        }
        else if(args.size() == 1)
        {
            // RandomUniformLike:
            // output type and shape are the same as the input's by default;
            // dtype is used instead when the attribute is set
            if(not contains(valid_types, args[0]->get_shape().type()))
                MIGRAPHX_THROW(opd.op_name + ": invalid output type: " +
                               std::to_string(args[0]->get_shape().type()) +
                               ". Valid types are float, half, and double.");
            out_shape =
                use_dtype ? shape{out_type, args[0]->get_shape().lens()} : args[0]->get_shape();
        }
        else
        {
            MIGRAPHX_THROW(opd.op_name +
                           ": cannot deduce shape without shape attribute or argument.");
        }

        // uniform_real_distribution takes (min, max), so pass low before high
        std::mt19937 gen(seed);
        std::uniform_real_distribution<> d(low, high);
        std::vector<double> rand_vals(out_shape.elements());
        std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
        return info.add_literal(literal{out_shape, rand_vals});
    }
};

} // namespace onnx
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
@@ -2992,6 +2992,186 @@ def quantizelinear_neg_axis_test():
    return make_quantizelinear_axis_graph(-2)
@onnx_test
def randomnormal_test():
    dtype = 11
    mean = 10.0
    scale = 1.5
    seed = 0.0
    shape = [2, 3, 4]
    output = helper.make_tensor_value_info('output', TensorProto.DOUBLE,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomNormal',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype,
                                 mean=mean,
                                 scale=scale,
                                 seed=seed,
                                 shape=shape)

    return ([node], [], [output])


@onnx_test
def randomnormal_dtype_error_test():
    dtype = 6
    shape = [2, 3, 4]
    output = helper.make_tensor_value_info('output', TensorProto.INT32,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomNormal',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype,
                                 shape=shape)

    return ([node], [], [output])


@onnx_test
def randomnormal_shape_error_test():
    dtype = 1
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomNormal',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype)

    return ([node], [], [output])


@onnx_test
def randomnormallike_test():
    dtype = 10
    mean = 10.0
    scale = 1.5
    seed = 0.0
    input = helper.make_tensor_value_info('input', TensorProto.FLOAT16,
                                          [2, 3, 4])
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT16,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomNormalLike',
                                 inputs=['input'],
                                 outputs=['output'],
                                 dtype=dtype,
                                 mean=mean,
                                 scale=scale,
                                 seed=seed)

    return ([node], [input], [output])


@onnx_test
def randomnormallike_type_error_test():
    seed = 0
    input = helper.make_tensor_value_info('input', TensorProto.INT32,
                                          [2, 3, 4])
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomNormalLike',
                                 inputs=['input'],
                                 outputs=['output'],
                                 seed=seed)

    return ([node], [input], [output])


@onnx_test
def randomuniform_test():
    dtype = 11
    high = 1.0
    low = 0.0
    seed = 0.0
    shape = [2, 3, 4]
    output = helper.make_tensor_value_info('output', TensorProto.DOUBLE,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomUniform',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype,
                                 high=high,
                                 low=low,
                                 seed=seed,
                                 shape=shape)

    return ([node], [], [output])


@onnx_test
def randomuniform_dtype_error_test():
    dtype = 6
    shape = [2, 3, 4]
    output = helper.make_tensor_value_info('output', TensorProto.INT32,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomUniform',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype,
                                 shape=shape)

    return ([node], [], [output])


@onnx_test
def randomuniform_shape_error_test():
    dtype = 1
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomUniform',
                                 inputs=[],
                                 outputs=['output'],
                                 dtype=dtype)

    return ([node], [], [output])


@onnx_test
def randomuniformlike_test():
    dtype = 10
    high = 10.0
    low = 1.0
    seed = 0.0
    input = helper.make_tensor_value_info('input', TensorProto.FLOAT16,
                                          [2, 3, 4])
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT16,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomUniformLike',
                                 inputs=['input'],
                                 outputs=['output'],
                                 dtype=dtype,
                                 high=high,
                                 low=low,
                                 seed=seed)

    return ([node], [input], [output])


@onnx_test
def randomuniformlike_type_error_test():
    seed = 0
    input = helper.make_tensor_value_info('input', TensorProto.INT32,
                                          [2, 3, 4])
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                           [2, 3, 4])

    node = onnx.helper.make_node('RandomUniformLike',
                                 inputs=['input'],
                                 outputs=['output'],
                                 seed=seed)

    return ([node], [input], [output])
@onnx_test
def range_test():
...
@@ -18,6 +18,7 @@
#include <migraphx/op/lrn.hpp>
#include <migraphx/op/reshape.hpp>
#include <migraphx/op/unknown.hpp>
#include <random>
#include <migraphx/serialize.hpp>
@@ -2725,6 +2726,130 @@ TEST_CASE(quantizelinear_neg_axis_test)
    EXPECT(p.sort() == prog.sort());
}
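The value-comparing tests below work because std::mt19937 is fully determined by its seed: rebuilding the generator with the same seed and distribution reproduces the parser's literal bit for bit. A standalone sketch of that property, under no assumptions beyond the standard library:

#include <algorithm>
#include <cassert>
#include <random>
#include <vector>

// Regenerate a uniform sequence the way the parser does; the same seed and
// bounds always yield the same values, which is what lets a test compare a
// hand-built literal against the parsed program.
std::vector<double> make_vals(unsigned seed, double low, double high, std::size_t n)
{
    std::mt19937 gen(seed);
    std::uniform_real_distribution<> d(low, high);
    std::vector<double> vals(n);
    std::generate(vals.begin(), vals.end(), [&] { return d(gen); });
    return vals;
}

int main()
{
    assert(make_vals(0, 0.0, 1.0, 24) == make_vals(0, 0.0, 1.0, 24));
    return 0;
}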
TEST_CASE(randomnormal_test)
{
    float mean  = 10.0;
    float scale = 1.5;
    float seed  = 0.0;
    std::vector<int> shape_attr{2, 3, 4};

    migraphx::program p;
    auto* mm = p.get_main_module();
    migraphx::shape s{migraphx::shape::double_type, shape_attr};
    std::vector<double> rand_vals(s.elements());
    std::mt19937 gen(seed);
    std::normal_distribution<> d(mean, scale);
    std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
    mm->add_literal(migraphx::literal{s, rand_vals});

    auto prog = optimize_onnx("randomnormal_test.onnx");
    EXPECT(p == prog);
}

TEST_CASE(randomnormal_dtype_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomnormal_dtype_error_test.onnx"); }));
}

TEST_CASE(randomnormal_shape_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomnormal_shape_error_test.onnx"); }));
}

TEST_CASE(randomnormallike_test)
{
    float mean  = 10.0;
    float scale = 1.5;
    float seed  = 0.0;
    std::vector<int> shape_attr{2, 3, 4};

    migraphx::program p;
    auto* mm = p.get_main_module();
    migraphx::shape s{migraphx::shape::half_type, shape_attr};
    std::vector<double> rand_vals(s.elements());
    std::mt19937 gen(seed);
    std::normal_distribution<> d(mean, scale);
    std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
    mm->add_parameter("input", s);
    mm->add_literal(migraphx::literal{s, rand_vals});

    auto prog = optimize_onnx("randomnormallike_test.onnx");
    EXPECT(p == prog);
}

TEST_CASE(randomnormallike_type_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomnormallike_type_error_test.onnx"); }));
}

TEST_CASE(randomuniform_test)
{
    float high = 1.0;
    float low  = 0.0;
    float seed = 0.0;
    std::vector<int> shape_attr{2, 3, 4};

    migraphx::program p;
    auto* mm = p.get_main_module();
    migraphx::shape s{migraphx::shape::double_type, shape_attr};
    std::vector<double> rand_vals(s.elements());
    std::mt19937 gen(seed);
    std::uniform_real_distribution<> d(low, high);
    std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
    mm->add_literal(migraphx::literal{s, rand_vals});

    auto prog = optimize_onnx("randomuniform_test.onnx");
    EXPECT(p == prog);
}

TEST_CASE(randomuniform_dtype_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomuniform_dtype_error_test.onnx"); }));
}

TEST_CASE(randomuniform_shape_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomuniform_shape_error_test.onnx"); }));
}

TEST_CASE(randomuniformlike_test)
{
    float high = 10.0;
    float low  = 1.0;
    float seed = 0.0;
    std::vector<int> shape_attr{2, 3, 4};

    migraphx::program p;
    auto* mm = p.get_main_module();
    migraphx::shape s{migraphx::shape::half_type, shape_attr};
    std::vector<double> rand_vals(s.elements());
    std::mt19937 gen(seed);
    std::uniform_real_distribution<> d(low, high);
    std::generate(rand_vals.begin(), rand_vals.end(), [&]() { return d(gen); });
    mm->add_parameter("input", s);
    mm->add_literal(migraphx::literal{s, rand_vals});

    auto prog = optimize_onnx("randomuniformlike_test.onnx");
    EXPECT(p == prog);
}

TEST_CASE(randomuniformlike_type_error_test)
{
    EXPECT(test::throws([&] { migraphx::parse_onnx("randomuniformlike_type_error_test.onnx"); }));
}
TEST_CASE(range_test)
{
    migraphx::program p;
...
[Binary .onnx test files added: randomnormal_dtype_error_test.onnx, randomnormal_shape_error_test.onnx, randomuniform_dtype_error_test.onnx, randomuniform_shape_error_test.onnx]