Commit 184804b4 authored by Shucai Xiao's avatar Shucai Xiao
Browse files

remove unnecessary changes

parent ade4bccd
...@@ -81,15 +81,7 @@ template <class T> ...@@ -81,15 +81,7 @@ template <class T>
std::vector<T> generate_tensor_data(const migraphx::shape& s, unsigned long seed = 0) std::vector<T> generate_tensor_data(const migraphx::shape& s, unsigned long seed = 0)
{ {
std::vector<T> result(s.elements()); std::vector<T> result(s.elements());
shape::type_t type = s.type(); std::generate(result.begin(), result.end(), xorshf96_generator<T>{seed});
if(type == shape::int64_type or type == shape::int32_type)
{
std::generate(result.begin(), result.end(), [] { return 1; });
}
else
{
std::generate(result.begin(), result.end(), xorshf96_generator<T>{seed});
}
// std::generate(result.begin(), result.end(), [&]{ return seed % 7; }); // std::generate(result.begin(), result.end(), [&]{ return seed % 7; });
// std::generate(result.begin(), result.end(), []{ return 1; }); // std::generate(result.begin(), result.end(), []{ return 1; });
......
...@@ -261,28 +261,15 @@ struct onnx_parser ...@@ -261,28 +261,15 @@ struct onnx_parser
return prog.add_instruction(op, std::move(args)); return prog.add_instruction(op, std::move(args));
} }
// Translate an ONNX Softmax node into MIGraphX instructions.
// The input is first flattened to a 4-D shape {d0, d1, 1, 1}, softmax is
// applied to that, and the result is reshaped back to the 2-D shape {d0, d1}.
// NOTE(review): only the first two entries of the input lens are used, so this
// assumes a rank-2 input tensor — confirm against callers for higher ranks.
// The name and attribute arguments are intentionally ignored.
instruction_ref
parse_softmax(const std::string&, const attribute_map&, std::vector<instruction_ref> args)
{
    const auto in_lens = args.front()->get_shape().lens();
    // Flatten to {d0, d1, 1, 1} so the softmax operator sees a 4-D tensor.
    auto flattened = prog.add_instruction(
        op::reshape{{long(in_lens[0]), long(in_lens[1]), 1, 1}}, args.front());
    auto probs = prog.add_instruction(op::softmax{}, flattened);
    // Restore the original 2-D shape for downstream consumers.
    return prog.add_instruction(op::reshape{{long(in_lens[0]), long(in_lens[1])}}, probs);
}
instruction_ref parse_logsoftmax(const std::string&, instruction_ref parse_logsoftmax(const std::string&,
......
...@@ -423,7 +423,9 @@ TEST_CASE(softmax_test) ...@@ -423,7 +423,9 @@ TEST_CASE(softmax_test)
{ {
migraphx::program p; migraphx::program p;
auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 3}}); auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 3}});
p.add_instruction(migraphx::op::softmax{1}, l0); auto r = p.add_instruction(migraphx::op::reshape{{1, 3, 1, 1}}, l0);
auto s = p.add_instruction(migraphx::op::softmax{}, r);
p.add_instruction(migraphx::op::reshape{{1, 3}}, s);
auto prog = migraphx::parse_onnx("softmax_test.onnx"); auto prog = migraphx::parse_onnx("softmax_test.onnx");
EXPECT(p == prog); EXPECT(p == prog);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment