Commit d0174a6c authored by Ted Themistokleous

Work in progress. Getting segfaults now with eval

parent da81615c
@@ -127,7 +127,7 @@ struct reshape
             }
         }
-        return shape{inputs.front().type(), rdims};
+        return {inputs.front().type(), rdims};
     }
 
     shape compute_shape(std::vector<shape> inputs) const
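
Note on the hunk above: it only toggles whether the return statement names the type explicitly. With a declared return type of shape, both forms build the same object. A minimal standalone illustration of that C++ rule, using a toy Shape aggregate rather than the MIGraphX class:

#include <cstddef>
#include <vector>

// Toy stand-in for the shape class: an element-type tag plus dimensions.
struct Shape
{
    int type;
    std::vector<std::size_t> dims;
};

Shape old_style(std::vector<std::size_t> rdims)
{
    return Shape{0, rdims}; // names the type explicitly, as the '-' line does
}

Shape new_style(std::vector<std::size_t> rdims)
{
    return {0, rdims}; // list-initializes the declared return type, as the '+' line does
}

int main()
{
    return old_style({2, 3}).dims == new_style({2, 3}).dims ? 0 : 1; // both build the same object
}
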
@@ -152,9 +152,9 @@ struct reshape
         assert(dyn_out.computed_shape.standard());
         argument result{dyn_out.computed_shape};
-        auto resh = args[0].reshape_lazy(dyn_out.computed_shape);
-        visit_all(result, resh)([&](auto output, auto input) {
+        //auto resh = args[0](dyn_out.computed_shape);
+        visit_all(result, args[0])([&](auto output, auto input) {
             shape_for_each(output.get_shape(), [&](const auto& idx) {
                 output(idx.begin(), idx.end()) = input(idx.begin(), idx.end());
             });
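
Note on the hunk above: the '+' side of reshape::compute copies every element of args[0] into a freshly allocated standard-layout result, while the '-' side built an aliasing reshape_lazy view of the input. A standalone sketch of that copy-versus-alias distinction, using plain vectors rather than the MIGraphX argument/visit_all API:

#include <cstddef>
#include <iostream>
#include <numeric>
#include <vector>

// A standard (contiguous) buffer can be "reshaped" two ways:
//   1. alias it: keep the same storage and only reinterpret the dimensions
//   2. copy it: allocate a new buffer and copy element by element

std::vector<float> reshape_by_copy(const std::vector<float>& in)
{
    std::vector<float> out(in.size()); // fresh allocation, like argument result{dyn_out.computed_shape}
    for(std::size_t i = 0; i < in.size(); ++i)
        out[i] = in[i]; // element-wise copy, like the shape_for_each loop
    return out;
}

const std::vector<float>& reshape_by_alias(const std::vector<float>& in)
{
    // No copy at all: only the dims/strides bookkeeping would change.
    // Valid only when the input is already laid out contiguously.
    return in;
}

int main()
{
    std::vector<float> data(6);
    std::iota(data.begin(), data.end(), 0.0f); // treat as a 2x3 tensor holding 0..5

    auto copied        = reshape_by_copy(data);  // independent buffer, viewed as 3x2
    const auto& viewed = reshape_by_alias(data); // same storage, viewed as 3x2

    std::cout << (copied == viewed) << "\n"; // same values either way; prints 1
    return 0;
}
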
@@ -233,7 +233,7 @@ struct reshape_lazy
             }
         }
-        std::cout << rdims.size() << std::endl;
+        //std::cout << rdims.size() << std::endl;
         auto s = reshape_lazy_dims(inputs.front(), rdims);
         if(not s.has_value())
@@ -116,7 +116,7 @@ struct miopen_apply
         add_neg_op();
         add_nms_op();
         add_select_module_op();
-        add_reshape_lazy_op();
+        //add_reshape_lazy_op();
     }
 
     void copy_params() const
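
Note on the hunk above: miopen_apply dispatches through apply_map, a map from op name to a rewrite handler, so commenting out the emplace call on the '+' side simply leaves reshape instructions untouched by this pass. A minimal standalone sketch of that registration pattern, with hypothetical handler lambdas in place of the MIGraphX ones:

#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

// Toy version of apply_map: op name -> handler that rewrites that kind of instruction.
using Handler = std::function<void(const std::string&)>;

int main()
{
    std::unordered_map<std::string, Handler> apply_map;

    apply_map.emplace("neg", [](const std::string& op) { std::cout << "lowering " << op << "\n"; });
    apply_map.emplace("nms", [](const std::string& op) { std::cout << "lowering " << op << "\n"; });
    // apply_map.emplace("reshape", ...); // commented out: reshape instructions are left alone

    for(std::string op : {"neg", "reshape", "nms"})
    {
        auto it = apply_map.find(op);
        if(it != apply_map.end())
            it->second(op); // a registered handler rewrites the op
        else
            std::cout << op << " passes through unchanged\n";
    }
    return 0;
}
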
@@ -382,27 +382,26 @@ struct miopen_apply
     /**
      * Adds reshape lazy to reshape ops that can be aliased instead of copied
      */
-    void add_reshape_lazy_op()
+    /*void add_reshape_lazy_op()
     {
         apply_map.emplace("reshape", [=](instruction_ref ins) {
-            ins->debug_print();
-            /* Attempt lazy reshape to allow for aliasing. Potentially throws in get_shape if unable to alias */
+            /* Attempt lazy reshape to allow for aliasing. Potentially throws in get_shape if unable
+             * to alias
+            return mod->replace_instruction(ins, make_op("reshape_lazy", {{"dims", {ins->get_operator().to_value()}}}), ins->inputs(), ins->module_inputs());
             try
             {
-                auto lazy_ins = mod->replace_instruction(
-                    ins,
-                    make_op("reshape_lazy", {{"dims", {ins->get_operator().to_value().at("dims")}}}),
-                    ins->inputs(),
-                    ins->module_inputs());
-                return lazy_ins;
             }
-            catch (...)
+            catch(...)
             {
-                //std::cout << "catch reshape_lazy_fail" << std::endl;
-                /* can't alias so require an allocate for output and a contiguous */
-                auto s = ins->get_shape();
-                std::vector<instruction_ref> inputs = ins->inputs();
-                auto output = insert_allocation(ins, s);
-                return mod->insert_instruction(std::next(ins), make_op("gpu::contiguous"), ins, output);
+                return ins;
             }
         });
-    }
+    }*/
 };
 
 void lowering::apply(module_pass_manager& mpm) const
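
Note on the function commented out above: the '-' side implemented a try-to-alias, otherwise-copy policy. It attempted the reshape_lazy rewrite, and if the shape could not be aliased (get_shape throws), it fell back to allocating an output and copying through gpu::contiguous. A standalone sketch of that policy, with a simplified 'standard layout' flag standing in for the real aliasing check and a toy View type in place of the MIGraphX instruction/argument machinery:

#include <cstddef>
#include <iostream>
#include <memory>
#include <vector>

// Minimal stand-in for a tensor view: shared storage plus dimensions.
// 'standard' marks a contiguous layout, the only case the sketch will alias.
struct View
{
    std::shared_ptr<std::vector<float>> data;
    std::vector<std::size_t> dims;
    bool standard;
};

// Try the cheap aliasing path first; otherwise allocate and copy.
// This mirrors the removed try/catch: reshape_lazy when the shape can be
// aliased, an allocation plus gpu::contiguous-style copy when it cannot.
View reshape(const View& v, std::vector<std::size_t> new_dims)
{
    if(v.standard)
        return View{v.data, std::move(new_dims), true}; // alias: same storage, new dims

    auto copy = std::make_shared<std::vector<float>>(*v.data); // fallback: new allocation
    return View{std::move(copy), std::move(new_dims), true};   // plus an element copy
}

int main()
{
    auto storage = std::make_shared<std::vector<float>>(std::vector<float>{0, 1, 2, 3, 4, 5});
    View v{storage, {2, 3}, true};

    View r = reshape(v, {3, 2});
    std::cout << (r.data == v.data ? "aliased" : "copied") << "\n"; // prints "aliased"
    return 0;
}
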