Commit d527d5d2 authored by Shucai Xiao

clang format

parent 148e548d
@@ -819,7 +819,8 @@ struct onnx_parser
         auto names = attributes.at("activations").strings();
         vec_names.clear();
         vec_names.resize(names.size());
-        std::transform(names.begin(), names.end(), vec_names.begin(), [] (auto &str) { return str; });
+        std::transform(
+            names.begin(), names.end(), vec_names.begin(), [](auto& str) { return str; });
     }
     // need 4 activation functions
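For context, the reformatted call above copies the ONNX "activations" attribute (a repeated protobuf string field in the real parser) into a std::vector<std::string>. A minimal standalone sketch of the same identity-transform idiom, with a plain std::vector standing in for the protobuf field (an assumption for illustration):

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    // stand-in for attributes.at("activations").strings(); in the parser
    // this is a protobuf repeated string field, not a std::vector
    std::vector<std::string> names = {"sigmoid", "tanh", "sigmoid", "tanh"};

    std::vector<std::string> vec_names;
    vec_names.clear();
    vec_names.resize(names.size());
    // identity lambda: element-wise copy across the two container types
    std::transform(
        names.begin(), names.end(), vec_names.begin(), [](auto& str) { return str; });

    for(const auto& n : vec_names)
        std::cout << n << "\n";
}

The identity lambda performs an element-wise copy between iterator types; the clang-format reflow of the call changes its layout, not its behavior.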
@@ -15,7 +15,7 @@ void rewrite_gru::apply(program& prog) const
     {
         if(ins->name() == "gru")
         {
             const auto actv_funcs = compute_actv_funcs(ins);
             // could be 3 to 5 inputs (though onnx::rnn has 6 inputs,
             // the 5th one is undefined and ignored by protobuf. so
             // we need to process up to 5 inputs
@@ -71,7 +71,7 @@ void rewrite_gru::apply(program& prog) const
                                         bias_forward,
                                         ih_forward,
                                         gru_op.linear_before_reset,
                                         actv_funcs.at(0),
                                         actv_funcs.at(1));
             auto ret_reverse = gru_cell(false,
@@ -83,7 +83,7 @@ void rewrite_gru::apply(program& prog) const
                                         bias_reverse,
                                         ih_reverse,
                                         gru_op.linear_before_reset,
                                         actv_funcs.at(2),
                                         actv_funcs.at(3));
             auto concat_output =
@@ -143,7 +143,7 @@ void rewrite_gru::apply(program& prog) const
                                 bias,
                                 ih,
                                 gru_op.linear_before_reset,
                                 actv_funcs.at(0),
                                 actv_funcs.at(1));
             auto last_output = prog.insert_instruction(ins, op::squeeze{{0}}, ret[1]);
@@ -348,20 +348,20 @@ std::vector<operation> rewrite_gru::compute_actv_funcs(instruction_ref ins) const
     if(gru_op.actv_funcs.empty())
         return {op::sigmoid{}, op::tanh{}, op::sigmoid{}, op::tanh{}};
     else if(gru_op.actv_funcs.size() == 1)
         return {gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(0)};
-    else if (gru_op.actv_funcs.size() == 2)
+    else if(gru_op.actv_funcs.size() == 2)
         return {gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(1),
                 gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(1)};
-    else if (gru_op.actv_funcs.size() == 3)
+    else if(gru_op.actv_funcs.size() == 3)
         return {gru_op.actv_funcs.at(0),
                 gru_op.actv_funcs.at(1),
                 gru_op.actv_funcs.at(2),
                 gru_op.actv_funcs.at(0)};
     else
         return gru_op.actv_funcs;
 }
@@ -369,9 +369,8 @@ std::vector<operation> rewrite_gru::compute_actv_funcs(instruction_ref ins) const
 {
     if(gru_op.actv_funcs.empty())
         return {op::sigmoid{}, op::tanh{}};
-    else if (gru_op.actv_funcs.size() == 1)
-        return {gru_op.actv_funcs.at(0),
-                gru_op.actv_funcs.at(0)};
+    else if(gru_op.actv_funcs.size() == 1)
+        return {gru_op.actv_funcs.at(0), gru_op.actv_funcs.at(0)};
     else
         return gru_op.actv_funcs;
 }
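The two compute_actv_funcs overloads above implement a broadcast rule for user-supplied activation functions: a bidirectional GRU needs 4 (2 per direction) and a single-direction GRU needs 2, with {sigmoid, tanh} as the defaults. A self-contained sketch of the same rule, with std::string standing in for migraphx::operation (an assumption for illustration):

#include <cassert>
#include <string>
#include <vector>

// Broadcast n user-supplied activation functions (n <= 4) to the 4 slots
// of a bidirectional GRU: slots 0-1 drive the forward cell, 2-3 the
// reverse cell, mirroring the actv_funcs.at(...) usage in rewrite_gru.
std::vector<std::string> fill_bidirectional(const std::vector<std::string>& f)
{
    if(f.empty())
        return {"sigmoid", "tanh", "sigmoid", "tanh"};
    else if(f.size() == 1)
        return {f.at(0), f.at(0), f.at(0), f.at(0)};
    else if(f.size() == 2)
        return {f.at(0), f.at(1), f.at(0), f.at(1)};
    else if(f.size() == 3)
        return {f.at(0), f.at(1), f.at(2), f.at(0)};
    else
        return f;
}

// Single-direction variant: only 2 slots to fill.
std::vector<std::string> fill_single(const std::vector<std::string>& f)
{
    if(f.empty())
        return {"sigmoid", "tanh"};
    else if(f.size() == 1)
        return {f.at(0), f.at(0)};
    else
        return f;
}

int main()
{
    // 2 supplied functions repeat per direction; 1 fills every slot
    assert((fill_bidirectional({"relu", "tanh"}) ==
            std::vector<std::string>{"relu", "tanh", "relu", "tanh"}));
    assert((fill_single({"relu"}) == std::vector<std::string>{"relu", "relu"}));
}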