Commit 75d5c660 authored by Khalique

progress on fixing padding

parent 4fd8c544
......@@ -11,6 +11,14 @@ inline std::size_t calculate_padding(std::size_t weight_dim, std::size_t dilatio
return (dilation * (weight_dim - 1)) / 2;
}
// Overload used by the TF "SAME" paths below: compute the total pad for one
// spatial dimension and split it across a 4-element pads vector laid out as
// {top, left, bottom, right} (idx 0 fills top/bottom, idx 1 fills left/right).
inline void calculate_padding(int64_t idx, std::vector<int64_t>& pads, int64_t input_dim, int64_t stride, int64_t dilation, int64_t weight_dim)
{
int64_t output_dim = input_dim / stride;
int64_t pad = std::max(static_cast<int64_t>(0), (output_dim - 1) * stride + dilation * weight_dim - input_dim);
pads[idx] = pad / 2; // leading edge gets the smaller half
pads[idx + 2] = pad - pad / 2; // trailing edge takes the remainder when pad is odd
}
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
......
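For reference, here is a minimal standalone sketch of the split the new `calculate_padding` overload performs (the sample sizes are illustrative, not taken from the commit): the total pad is derived from the output size, stride, dilation and kernel size, and any odd remainder goes to the trailing edge.

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
    // Illustrative sizes: a 1-D extent of 4, stride 2, no dilation, kernel 3.
    int64_t input_dim = 4, stride = 2, dilation = 1, weight_dim = 3;
    int64_t output_dim = input_dim / stride;
    int64_t pad = std::max<int64_t>(0, (output_dim - 1) * stride + dilation * weight_dim - input_dim);
    std::vector<int64_t> pads(4, 0); // {top, left, bottom, right}
    pads[0] = pad / 2;       // leading edge gets the smaller half
    pads[2] = pad - pad / 2; // trailing edge absorbs the remainder
    std::cout << pads[0] << " " << pads[2] << std::endl; // prints "0 1" -> asymmetric
}
```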
......@@ -352,7 +352,7 @@ struct onnx_parser
{
// insert zeros for pad op (args[0] has 4 dims)
padding = {0, 0, padding[0], padding[1], 0, 0, padding[2], padding[3]};
l0 = prog.add_instruction(op::pad{padding}, l0);
l0 = prog.add_instruction(op::pad{padding, std::numeric_limits<float>::lowest()}, l0);
}
else
{
......
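The only change in this ONNX hunk is the pad value: the explicit pad instruction now fills with `std::numeric_limits<float>::lowest()` instead of the default 0, presumably because the padded tensor feeds a max-pooling window and padded cells must never win the max. A small self-contained illustration of the difference (not MIGraphX code):

```cpp
#include <algorithm>
#include <iostream>
#include <limits>

int main()
{
    float window[3] = {-5.0f, -3.0f, 0.0f};             // last element stands in for a padded cell
    std::cout << *std::max_element(window, window + 3)  // prints 0: zero padding corrupts an
              << "\n";                                   // all-negative window
    window[2] = std::numeric_limits<float>::lowest();
    std::cout << *std::max_element(window, window + 3)  // prints -3: lowest() padding leaves the
              << "\n";                                   // real values in charge
}
```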
......@@ -14,6 +14,25 @@ argument
pad(hipStream_t stream, argument result, argument arg1, float value, std::vector<std::int64_t> pads)
{
std::size_t nelements = arg1.get_shape().elements();
// if(value == std::numeric_limits<float>::lowest())
// {
// visit_all(result)([&](auto output) {
// auto* outptr = output.data();
// gs_launch(stream, nelements)([=](auto i) {
// outptr[i] = std::numeric_limits<typename decltype(output)::value_type>::lowest();
// });
// });
// }
// else
// {
// visit_all(result)([&](auto output) {
// auto* outptr = output.data();
// gs_launch(stream, nelements)([=](auto i) {
// outptr[i] = static_cast<typename decltype(output)::value_type>(value);
// });
// });
// }
nary(stream, result)([=] { return value; }); // first fill the entire output with the pad value
visit_all(result, arg1)([&](auto output, auto input) {
......
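Conceptually the GPU pad kernel does two passes: fill the whole output buffer with the pad value (the `nary` call), then copy the input into the offset interior (the `visit_all` loop truncated above). A host-side 1-D reference of that scheme, written only to illustrate the idea:

```cpp
#include <algorithm>
#include <iostream>
#include <vector>

// Pass 1: allocate the output pre-filled with the pad value.
// Pass 2: copy the input into place, offset by the leading pad.
std::vector<float> pad_1d(const std::vector<float>& in, int before, int after, float value)
{
    std::vector<float> out(in.size() + before + after, value);
    std::copy(in.begin(), in.end(), out.begin() + before);
    return out;
}

int main()
{
    for(float v : pad_1d({1, 2, 3}, 1, 2, 0.f))
        std::cout << v << " "; // 0 1 2 3 0 0
    std::cout << "\n";
}
```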
......@@ -317,17 +317,34 @@ struct tf_parser
}
}
auto l0 = args[0];
if(contains(attributes, "padding"))
{
const std::string& pad_mode = attributes.at("padding").s();
if(pad_mode.find("SAME") != std::string::npos)
{
op.padding_mode = op::padding_mode_t::same;
// op.padding_mode = op::padding_mode_t::same;
std::vector<size_t> weight_dims = weights->get_shape().lens();
size_t weight_h = weight_dims[2];
size_t weight_w = weight_dims[3];
op.padding[0] = calculate_padding(weight_h, op.dilation[0]);
op.padding[1] = calculate_padding(weight_w, op.dilation[1]);
auto input_dims = l0->get_shape().lens();
size_t input_h = input_dims[2];
size_t input_w = input_dims[3];
std::vector<int64_t> pads(input_dims.size());
calculate_padding(0, pads, input_h, op.stride[0], op.dilation[0], weight_h);
calculate_padding(1, pads, input_w, op.stride[1], op.dilation[1], weight_w);
if(pads[0] != pads[2] || pads[1] != pads[3])
{
std::vector<int64_t> padding = {0, 0, pads[0], pads[1], 0, 0, pads[2], pads[3]};
l0 = prog.add_instruction(migraphx::op::pad{padding}, l0);
}
else
{
op.padding[0] = pads[0];
op.padding[1] = pads[1];
}
}
else if(pad_mode.find("VALID") != std::string::npos)
{
......@@ -350,7 +367,7 @@ struct tf_parser
}
}
return prog.add_instruction(op, {args[0], weights});
return prog.add_instruction(op, {l0, weights});
}
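The new SAME handling in `parse_conv` boils down to one decision: if the computed pads are symmetric they stay in the convolution's own `padding` attribute, otherwise an explicit `op::pad` instruction is inserted in front of the convolution. A condensed sketch of that decision (names and values here are illustrative only):

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
    std::vector<int64_t> pads = {0, 0, 1, 1}; // {top, left, bottom, right}
    bool symmetric = pads[0] == pads[2] && pads[1] == pads[3];
    if(symmetric)
        std::cout << "fold {" << pads[0] << ", " << pads[1] << "} into op.padding\n";
    else
        std::cout << "insert an explicit pad instruction before the convolution\n";
}
```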
instruction_ref parse_depthwiseconv(const std::string&,
......@@ -400,17 +417,35 @@ struct tf_parser
}
}
auto l0 = args[0];
if(contains(attributes, "padding"))
{
const std::string& pad_mode = attributes.at("padding").s();
std::vector<size_t> weight_dims = weights->get_shape().lens();
size_t weight_h = weight_dims[2];
size_t weight_w = weight_dims[3];
if(pad_mode.find("SAME") != std::string::npos)
{
op.padding_mode = op::padding_mode_t::same;
op.padding[0] = calculate_padding(weight_h, op.dilation[0]);
op.padding[1] = calculate_padding(weight_w, op.dilation[1]);
// op.padding_mode = op::padding_mode_t::same;
std::vector<size_t> weight_dims = weights->get_shape().lens();
size_t weight_h = weight_dims[2];
size_t weight_w = weight_dims[3];
auto input_dims = l0->get_shape().lens();
size_t input_h = input_dims[2];
size_t input_w = input_dims[3];
std::vector<int64_t> pads(input_dims.size());
calculate_padding(0, pads, input_h, op.stride[0], op.dilation[0], weight_h);
calculate_padding(1, pads, input_w, op.stride[1], op.dilation[1], weight_w);
if(pads[0] != pads[2] || pads[1] != pads[3])
{
std::vector<int64_t> padding = {0, 0, pads[0], pads[1], 0, 0, pads[2], pads[3]};
l0 = prog.add_instruction(migraphx::op::pad{padding}, l0);
}
else
{
op.padding[0] = pads[0];
op.padding[1] = pads[1];
}
}
else if(pad_mode.find("VALID") != std::string::npos)
{
......@@ -432,7 +467,7 @@ struct tf_parser
auto cweights = prog.add_instruction(op::contiguous{}, weights);
auto new_weights = prog.add_instruction(op::reshape{new_weights_shape}, cweights);
return prog.add_instruction(op, {args[0], new_weights});
return prog.add_instruction(op, {l0, new_weights});
}
instruction_ref
......@@ -567,21 +602,42 @@ struct tf_parser
op.lengths[0] = ksize[2];
op.lengths[1] = ksize[3];
}
auto l0 = args[0];
if(contains(attributes, "padding"))
{
const std::string& pad_mode = attributes.at("padding").s();
if(pad_mode.find("SAME") != std::string::npos)
{
op.padding_mode = op::padding_mode_t::same;
op.padding[0] = calculate_padding(op.lengths[0], 1);
op.padding[1] = calculate_padding(op.lengths[1], 1);
//op.padding_mode = op::padding_mode_t::same;
auto input_dims = l0->get_shape().lens();
size_t input_h = input_dims[2];
size_t input_w = input_dims[3];
std::vector<int64_t> pads(input_dims.size());
calculate_padding(0, pads, input_h, op.stride[0], 1, op.lengths[0]);
calculate_padding(1, pads, input_w, op.stride[1], 1, op.lengths[1]);
// for(auto pad : pads)
// {
// std::cout << pad << std::endl;
// }
if(pads[0] != pads[2] || pads[1] != pads[3])
{
std::vector<int64_t> padding = {0, 0, pads[0], pads[1], 0, 0, pads[2], pads[3]};
l0 = prog.add_instruction(migraphx::op::pad{padding, std::numeric_limits<float>::lowest()}, l0);
}
else
{
op.padding[0] = pads[0];
op.padding[1] = pads[1];
}
}
else if(pad_mode.find("VALID") != std::string::npos)
{
op.padding_mode = op::padding_mode_t::valid;
}
}
return prog.add_instruction(op, args[0]);
return prog.add_instruction(op, l0);
}
instruction_ref
......
......@@ -1573,6 +1573,26 @@ void manual_identity()
std::cout << result << std::endl;
}
void pad_test()
{
migraphx::program p;
std::vector<float> data0 = {0, 1, 2, 3};
migraphx::shape s0{migraphx::shape::float_type, {2, 2}};
auto l0 = p.add_literal(migraphx::literal{s0, data0});
migraphx::op::pad op{};
op.value = std::numeric_limits<float>::lowest(); // match the float literal and the lowest() pad path exercised above
op.pads = {0, 0, 1, 1};
p.add_instruction(op, l0);
p.compile(migraphx::gpu::target{});
migraphx::program::parameter_map m;
for(auto&& x : p.get_parameter_shapes())
{
m[x.first] = migraphx::gpu::to_gpu(migraphx::generate_argument(x.second));
}
auto result = migraphx::gpu::from_gpu(p.eval(m));
std::cout << result << std::endl;
}
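Given the pads ordering used elsewhere in this commit (all leading offsets first, then all trailing offsets), `{0, 0, 1, 1}` on a 2x2 input should add one row below and one column to the right, filled with the pad value. A host-side sketch of the result `pad_test` is expected to print, assuming that ordering:

```cpp
#include <iostream>
#include <limits>
#include <vector>

int main()
{
    float low = std::numeric_limits<float>::lowest();
    std::vector<std::vector<float>> expected = {
        {0, 1, low},
        {2, 3, low},
        {low, low, low},
    };
    for(const auto& row : expected)
    {
        for(float v : row)
            std::cout << v << " ";
        std::cout << "\n";
    }
}
```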
void manual_test_concat_relu()
{
migraphx::program p;
......