Commit 0bd4e8bb authored by Shucai Xiao

clang format

parent 60a0f286
@@ -55,7 +55,7 @@ struct convolution
         check_shapes{inputs, *this}.has(2).same_type().same_ndims().min_ndims(3);
         check_attribute_size();
         // dim num of input and attribute should match
-        auto in_lens = inputs[0].lens();
+        auto in_lens      = inputs[0].lens();
         auto input_size   = in_lens.size();
         auto padding_size = padding.size();
         if(not(input_size == padding_size / 2 + 2 or input_size == padding_size + 2))
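A quick worked illustration of the rank check in the hunk above (a standalone sketch; the function name and values are illustrative, not MIGraphX API): for a 2-D convolution the input tensor has rank 4, so a padding attribute with one value per spatial dimension (size 2) satisfies input_size == padding_size + 2, while begin/end pairs per spatial dimension (size 4) satisfy input_size == padding_size / 2 + 2.

#include <cstddef>
#include <iostream>

// Sketch of the padding/rank consistency check shown above (illustrative only).
bool padding_matches_rank(std::size_t input_size, std::size_t padding_size)
{
    // Either begin/end pairs per spatial dimension, or one pad value per spatial dimension.
    return input_size == padding_size / 2 + 2 or input_size == padding_size + 2;
}

int main()
{
    std::cout << padding_matches_rank(4, 2) << "\n"; // 2-D conv, one value per spatial dim -> 1
    std::cout << padding_matches_rank(4, 4) << "\n"; // 2-D conv, begin/end pairs           -> 1
    std::cout << padding_matches_rank(4, 3) << "\n"; // mismatched attribute                -> 0
}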
@@ -32,7 +32,7 @@ struct reshape
     {
         check_shapes{inputs, *this}.has(1).standard();
         // input shape is dynamic, return dim directly
-        if (inputs.front().dynamic())
+        if(inputs.front().dynamic())
         {
             std::vector<std::size_t> rdims(dims.begin(), dims.end());
             return {inputs.front().type(), rdims};
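For context, a minimal sketch of what the dynamic branch above does, under the assumption shown in the hunk: when the input shape is dynamic, the reshape `dims` attribute is copied into a std::size_t vector and used directly as the output lengths. The helper name and simplified types here are stand-ins, not the MIGraphX classes.

#include <cstddef>
#include <cstdint>
#include <vector>

// When the input shape is dynamic, the requested `dims` are forwarded as the
// output lengths instead of being validated against a known element count.
std::vector<std::size_t> reshape_output_lens(const std::vector<int64_t>& dims)
{
    return std::vector<std::size_t>(dims.begin(), dims.end());
}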
@@ -301,7 +301,7 @@ argument instruction::eval(bool check_eval) const
     {
         return this->get_literal().get_argument();
     }
-    else if (op.name() == "shape")
+    else if(op.name() == "shape")
     {
         argument arg{this->inputs().front()->get_shape()};
         return normalized_operator().compute(result, {arg});
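The hunk above special-cases the "shape" operator in eval by wrapping the input instruction's shape in an argument and computing on it. A rough standalone analogue, assuming the operator behaves like ONNX Shape and emits the input's dimension lengths as data (simplified types, not the MIGraphX classes):

#include <cstddef>
#include <cstdint>
#include <vector>

// Rough analogue: evaluating a "shape" operator yields the dimension lengths
// of its input as data, so it can be folded without running the graph.
std::vector<int64_t> eval_shape_op(const std::vector<std::size_t>& input_lens)
{
    return std::vector<int64_t>(input_lens.begin(), input_lens.end());
}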
@@ -420,14 +420,14 @@ shape onnx_parser::parse_type(const onnx::TypeProto& t,
     {
         if(static_cast<int>(d.dim_value()) <= 0)
         {
-             // return default_dim_value;
+            // return default_dim_value;
             return 0;
         }
         return d.dim_value();
     }
     else
     {
-         // return default_dim_value;
+        // return default_dim_value;
         return 0;
     }
 });
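The change above only re-indents the comments; the behavior it documents is that a non-positive or missing dim_value is mapped to 0 rather than a default dimension value, marking that dimension as dynamic. A hedged sketch of that mapping, using a plain integer and a boolean flag instead of the onnx protobuf dimension type (the helper name is hypothetical):

#include <cstddef>
#include <cstdint>

// Non-positive or missing dim values map to 0, which the shape code treats as
// a dynamic dimension; known values pass through unchanged.
std::size_t parse_dim(int64_t dim_value, bool has_dim_value)
{
    if(not has_dim_value)
        return 0; // no concrete value in the model -> dynamic marker
    if(static_cast<int>(dim_value) <= 0)
        return 0; // unknown/symbolic dimension -> dynamic marker
    return static_cast<std::size_t>(dim_value);
}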
@@ -274,7 +274,8 @@ bool shape::scalar() const
 bool shape::dynamic() const
 {
-    if (scalar()) return false;
+    if(scalar())
+        return false;
     const auto& lens = this->lens();
     return std::find(lens.begin(), lens.end(), 0) != lens.end();
 }
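As the hunk shows, shape::dynamic() treats a non-scalar shape as dynamic when any of its dimension lengths is 0. A self-contained sketch of that predicate over a plain vector of lengths (illustrative only, not the MIGraphX shape class):

#include <algorithm>
#include <cstddef>
#include <vector>

// A shape is considered dynamic when it is not a scalar and at least one of
// its dimension lengths is 0.
bool is_dynamic(const std::vector<std::size_t>& lens, bool is_scalar)
{
    if(is_scalar)
        return false;
    return std::find(lens.begin(), lens.end(), std::size_t{0}) != lens.end();
}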