Commit 7bab863d authored by Shucai Xiao

clang-format.

parent a7a3f867
@@ -632,7 +632,7 @@ struct reshape
                 rdims[i] = missing_dim;
             }
         }
         shape s{inputs.front().type(), rdims};
         if(s.elements() != inputs.front().elements())
             MIGRAPHX_THROW("Wrong number of elements for reshape");
 ...
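For context on what this check protects: the reshape operator follows the usual "one inferred dimension" rule, where a single -1 in the requested dims is resolved to whatever value preserves the element count, and anything that still mismatches is rejected. A minimal standalone sketch of that rule, assuming plain std::vector in place of migraphx::shape (infer_reshape_dims is an illustrative name, not the operator's real method):

```cpp
#include <cstddef>
#include <functional>
#include <numeric>
#include <stdexcept>
#include <vector>

// Hypothetical helper: resolve a single -1 ("missing") target dim so the
// reshaped element count matches the input, then validate the result.
std::vector<std::size_t> infer_reshape_dims(const std::vector<std::size_t>& in_dims,
                                            std::vector<long long> target)
{
    auto product = [](auto first, auto last) {
        return std::accumulate(first, last, std::size_t{1}, std::multiplies<>{});
    };
    std::size_t in_elements = product(in_dims.begin(), in_dims.end());

    std::size_t known      = 1;
    long long missing_idx  = -1;
    for(std::size_t i = 0; i < target.size(); i++)
    {
        if(target[i] == -1)
            missing_idx = static_cast<long long>(i); // at most one inferred dim
        else
            known *= static_cast<std::size_t>(target[i]);
    }
    if(missing_idx >= 0)
        target[missing_idx] = static_cast<long long>(in_elements / known);

    std::vector<std::size_t> rdims(target.begin(), target.end());
    if(product(rdims.begin(), rdims.end()) != in_elements)
        throw std::runtime_error("Wrong number of elements for reshape");
    return rdims;
}
```

For example, reshaping a {2, 3, 4} input with target dims {6, -1} resolves to {6, 4}, while {5, -1} throws because 24 elements cannot fill 5 equal rows.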
@@ -740,7 +740,7 @@ struct onnx_parser
         });
         // bidirectional case should have two activation functions.
         // one is for forward, and the other is for reverse.
         // if only one actv function is provided, we use it in both
         // forward and reverse direction
         if(dirct == op::rnn::bidirectional)
 ...
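The comment block encodes a small normalization rule: a bidirectional RNN needs one activation function per direction, so a lone entry is reused for the reverse pass. A hedged sketch of that rule over plain strings (the real parser works on parsed ONNX attributes; the tanh default is an assumption based on the ONNX RNN spec):

```cpp
#include <stdexcept>
#include <string>
#include <vector>

// Normalize the activation list for a bidirectional RNN: one function per
// direction is required, so a single entry is duplicated for the reverse pass.
std::vector<std::string> normalize_bidirectional_activations(std::vector<std::string> actvs)
{
    if(actvs.empty())
        actvs = {"tanh", "tanh"}; // assumed default, per the ONNX RNN spec
    else if(actvs.size() == 1)
        actvs.push_back(actvs.front()); // reuse the same function for reverse
    else if(actvs.size() > 2)
        throw std::runtime_error("bidirectional RNN takes at most two activations");
    return actvs;
}
```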
@@ -95,7 +95,8 @@ void rewrite_rnn::apply(program& prog) const
         instruction_ref hidden_output{};
         if(ret_forward[0] == prog.end())
         {
-            hidden_output = prog.replace_instruction(ins, op::concat{1}, ret_forward[1], ret_reverse[1]);
+            hidden_output = prog.replace_instruction(
+                ins, op::concat{1}, ret_forward[1], ret_reverse[1]);
         }
         else
         {
@@ -103,7 +104,8 @@ void rewrite_rnn::apply(program& prog) const
                 prog.insert_instruction(ins, op::concat{0}, ret_forward[0], ret_forward[1]);
             ret_reverse[0] =
                 prog.insert_instruction(ins, op::concat{0}, ret_reverse[1], ret_reverse[0]);
-            hidden_output = prog.replace_instruction(ins, op::concat{1}, {ret_forward[0], ret_reverse[0]});
+            hidden_output = prog.replace_instruction(
+                ins, op::concat{1}, {ret_forward[0], ret_reverse[0]});
         }
         map_last_output[hidden_output] = last_output;
     }
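Both concatenations follow from the ONNX RNN output layout: the full sequence output Y is shaped [seq_length, num_directions, batch_size, hidden_size], so the forward and reverse sequence results are joined on axis 1, while the per-direction pieces built just above are stacked on axis 0. A small sketch of the axis arithmetic, with concat_dims as a hypothetical stand-in for op::concat's shape inference:

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical helper mirroring concat shape inference: all dims must match
// except the concat axis, which is summed.
std::vector<std::size_t> concat_dims(const std::vector<std::size_t>& a,
                                     const std::vector<std::size_t>& b,
                                     std::size_t axis)
{
    assert(a.size() == b.size());
    std::vector<std::size_t> out = a;
    for(std::size_t i = 0; i < a.size(); i++)
        if(i != axis)
            assert(a[i] == b[i]);
    out[axis] += b[axis];
    return out;
}

int main()
{
    // Illustrative numbers: seq_length = 5, batch = 2, hidden = 8.
    std::vector<std::size_t> fwd = {5, 1, 2, 8};
    std::vector<std::size_t> rev = {5, 1, 2, 8};
    auto y = concat_dims(fwd, rev, 1); // {5, 2, 2, 8}: directions side by side
    (void)y;
}
```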
@@ -151,7 +153,8 @@ void rewrite_rnn::apply(program& prog) const
         {
             auto concat_arg0 = is_forward ? ret[0] : ret[1];
             auto concat_arg1 = is_forward ? ret[1] : ret[0];
-            hidden_output = prog.replace_instruction(ins, op::concat{0}, concat_arg0, concat_arg1);
+            hidden_output =
+                prog.replace_instruction(ins, op::concat{0}, concat_arg0, concat_arg1);
         }
         map_last_output[hidden_output] = last_output;
     }
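The two ternaries pick the operand order so that the reverse direction stacks its pieces in the original sequence order. A trivial illustrative sketch of that selection (ordered_for_direction is a made-up helper, not MIGraphX API):

```cpp
#include <utility>

// Illustrative: choose the concat operand order by direction, mirroring the
// is_forward ternaries in the hunk above.
template <class T>
std::pair<T, T> ordered_for_direction(bool is_forward, T fwd_first, T fwd_second)
{
    if(is_forward)
        return {fwd_first, fwd_second};
    return {fwd_second, fwd_first};
}
```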
@@ -166,7 +169,7 @@ void rewrite_rnn::apply(program& prog) const
         auto inputs = ins->inputs();
         assert(inputs.size() == 1);
         auto arg = inputs[0];
-        if (map_last_output.count(arg) == 0)
+        if(map_last_output.count(arg) == 0)
         {
             MIGRAPHX_THROW("RNN_LAST_OUTPUT: no related rnn operator as its input");
         }
 ...
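The guard is the standard check-before-use pattern on a map keyed by the producing instruction: rnn_last_output is only valid when its input was registered during the RNN rewrite above. A generic sketch of the same pattern with std::unordered_map (names and the int key are illustrative):

```cpp
#include <stdexcept>
#include <string>
#include <unordered_map>

// Sketch: look up a required entry, throwing with a diagnostic when the
// producer was never registered (mirrors the count() == 0 guard above).
const std::string& require_last_output(
    const std::unordered_map<int, std::string>& map_last_output, int arg_id)
{
    auto it = map_last_output.find(arg_id);
    if(it == map_last_output.end())
        throw std::runtime_error("RNN_LAST_OUTPUT: no related rnn operator as its input");
    return it->second;
}
```

Using find() rather than count() followed by at() also avoids a second hash lookup.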