Commit 7809b341 authored by Ted Themistokleous's avatar Ted Themistokleous
Browse files

Backup of changes.

Unable to get dims() to populate correctly. Fails at replace_instruction in
lowering.
parent 834bb1bb
...@@ -86,10 +86,10 @@ void auto_contiguous::apply(module& m) const ...@@ -86,10 +86,10 @@ void auto_contiguous::apply(module& m) const
// perform a pass to insert contiguous for every reshape (without reshaper) before // perform a pass to insert contiguous for every reshape (without reshaper) before
// determining if aliasing can be performed // determining if aliasing can be performed
if(ins->name() == "reshape" and not is_reshaper_op(std::next(ins))) //if(ins->name() == "reshape" and not is_reshaper_op(std::next(ins)))
{ //{
m.insert_instruction(std::next(ins), make_op("contiguous"), ins); // m.insert_instruction(std::next(ins), make_op("contiguous"), ins);
} //}
shape s = ins->get_shape(); shape s = ins->get_shape();
if(not s.dynamic() and not s.standard() and s.elements() != 0) if(not s.dynamic() and not s.standard() and s.elements() != 0)
......
...@@ -233,6 +233,8 @@ struct reshape_lazy ...@@ -233,6 +233,8 @@ struct reshape_lazy
} }
} }
std::cout << rdims.size() << std::endl;
auto s = reshape_lazy_dims(inputs.front(), rdims); auto s = reshape_lazy_dims(inputs.front(), rdims);
if(not s.has_value()) if(not s.has_value())
MIGRAPHX_THROW("reshape_lazy on axis that is not packed."); MIGRAPHX_THROW("reshape_lazy on axis that is not packed.");
......
...@@ -602,40 +602,6 @@ struct find_reshape_cont ...@@ -602,40 +602,6 @@ struct find_reshape_cont
} }
}; };
// Remove the contiguous op performing the appropriate copy if we can instead alias the correct
// memory layout This removes a contiguous op as part of the pass
struct find_reshape_alias
{
auto matcher() const
{
return match::pointwise(
match::nargs(2),
match::either_arg(0, 1)(
match::name("contiguous")(match::args(match::name("reshape").bind("rsp")))
.bind("cont"),
match::any()));
}
void apply(module& m, const match::matcher_result& r) const
{
auto ins = r.result;
auto ins_cont = r.instructions["cont"];
auto in_ins = r.instructions["rsp"];
auto out_lens = in_ins->get_shape().lens();
std::vector<int64_t> out_dims(out_lens.begin(), out_lens.end());
// Contiguous is needed if output is non standard output from reshape occurs
if(std::all_of(ins_cont->inputs().begin(), ins_cont->inputs().end(), [](auto i) {
return i->get_shape().standard();
}))
{
m.replace_instruction(
ins, make_op("reshape_lazy", {{"dims", out_dims}}), ins->inputs());
}
}
};
// match sequence of transpose --> contiguous --> reshaper_op // match sequence of transpose --> contiguous --> reshaper_op
auto match_transpose_contiguous_reshaper() auto match_transpose_contiguous_reshaper()
{ {
...@@ -838,7 +804,6 @@ void simplify_reshapes::apply(module& m) const ...@@ -838,7 +804,6 @@ void simplify_reshapes::apply(module& m) const
match::find_matches(m, match::find_matches(m,
find_where_op{}, find_where_op{},
find_resize{}, find_resize{},
find_reshape_alias{},
find_reshape_cont{}, find_reshape_cont{},
find_nop_reshapes{}, find_nop_reshapes{},
find_reshaper{}, find_reshaper{},
......
...@@ -40,6 +40,7 @@ ...@@ -40,6 +40,7 @@
#include <migraphx/op/if_op.hpp> #include <migraphx/op/if_op.hpp>
#include <migraphx/op/reshape.hpp> #include <migraphx/op/reshape.hpp>
#include <migraphx/op/quant_dot.hpp> #include <migraphx/op/quant_dot.hpp>
#include <migraphx/op/reshape_lazy.hpp>
#include <migraphx/gpu/context.hpp> #include <migraphx/gpu/context.hpp>
#include <migraphx/gpu/lowering.hpp> #include <migraphx/gpu/lowering.hpp>
...@@ -89,7 +90,7 @@ struct miopen_apply ...@@ -89,7 +90,7 @@ struct miopen_apply
offload_copy = (mod == mpm->get_root_module()) ? pass->offload_copy : false; offload_copy = (mod == mpm->get_root_module()) ? pass->offload_copy : false;
add_generic_op("contiguous"); add_generic_op("contiguous");
add_generic_op("reshape_lazy");
add_extend_op("argmax"); add_extend_op("argmax");
add_extend_op("argmin"); add_extend_op("argmin");
add_extend_op("logsoftmax"); add_extend_op("logsoftmax");
...@@ -115,6 +116,7 @@ struct miopen_apply ...@@ -115,6 +116,7 @@ struct miopen_apply
add_neg_op(); add_neg_op();
add_nms_op(); add_nms_op();
add_select_module_op(); add_select_module_op();
add_reshape_lazy_op();
} }
void copy_params() const void copy_params() const
...@@ -376,6 +378,31 @@ struct miopen_apply ...@@ -376,6 +378,31 @@ struct miopen_apply
return mod->replace_instruction(ins, ins->get_operator(), inputs, ins->module_inputs()); return mod->replace_instruction(ins, ins->get_operator(), inputs, ins->module_inputs());
}); });
} }
/**
* Adds reshape lazy to reshape ops that can be aliased instead of copied
*/
void add_reshape_lazy_op()
{
apply_map.emplace("reshape", [=](instruction_ref ins) {
ins->debug_print();
/* Attempt lazy reshape to allow for aliasing. Potentially throws in get_shape if unable to alias */
return mod->replace_instruction(ins, make_op("reshape_lazy", {{"dims", {ins->get_operator().to_value()}}}), ins->inputs(), ins->module_inputs());
try
{
}
catch (...)
{
//std::cout << "catch reshape_lazy_fail" << std::endl;
/* can't alias so require an allocate for output and a contiguous */
auto s = ins->get_shape();
std::vector<instruction_ref> inputs = ins->inputs();
auto output = insert_allocation(ins, s);
return mod->insert_instruction(std::next(ins), make_op("gpu::contiguous"), ins, output);
}
});
}
}; };
void lowering::apply(module_pass_manager& mpm) const void lowering::apply(module_pass_manager& mpm) const
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment