Commit 47943895 authored by Paul

Merge branch 'develop' into mlir-c

parents bf3e958d c99be32c
@@ -312,4 +312,18 @@ TEST_CASE(module_without_bypass)
EXPECT(found);
}
TEST_CASE(multiple_module_dependency)
{
// Test that an instruction in a submodule can depend on an instruction from a previous module
migraphx::program p;
auto* mm = p.get_main_module();
auto* sub = p.create_module("sub");
auto l1 = mm->add_literal(migraphx::literal(3));
// add the same literal a second time to make sure instruction_refs are compared, rather than
// the instructions themselves
sub->add_literal(migraphx::literal(3));
sub->add_instruction(sum_op{}, l1, l1);
EXPECT((sub->validate() == sub->end()));
}
int main(int argc, const char* argv[]) { test::run(argc, argv); }
#include <migraphx/allocation_model.hpp>
#include <migraphx/replace_allocate.hpp>
#include <migraphx/dead_code_elimination.hpp>
#include <migraphx/pass_manager.hpp>
#include <migraphx/check_shapes.hpp>
#include <migraphx/argument.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/stringutils.hpp>
#include <migraphx/register_op.hpp>
#include <basic_ops.hpp>
#include <test.hpp>
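// dummy allocation op used by the no-out allocation model below; compute just returns an argument of the requested shape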
struct allocate_no_out : migraphx::auto_register_op<allocate_no_out>
{
migraphx::shape s{};
template <class Self, class F>
static auto reflect(Self& self, F f)
{
return migraphx::pack(f(self.s, "shape"));
}
std::string name() const { return "allocate_no_out"; }
migraphx::shape compute_shape(const std::vector<migraphx::shape>& inputs) const
{
migraphx::check_shapes{inputs, *this}.has(0);
return s;
}
migraphx::argument compute(migraphx::context&,
const migraphx::shape& output_shape,
const std::vector<migraphx::argument>&) const
{
return {output_shape};
}
};
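// identical dummy allocation op, used by the allocation model that inserts output parameters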
struct allocate_with_out : migraphx::auto_register_op<allocate_with_out>
{
migraphx::shape s{};
template <class Self, class F>
static auto reflect(Self& self, F f)
{
return migraphx::pack(f(self.s, "shape"));
}
std::string name() const { return "allocate_with_out"; }
migraphx::shape compute_shape(const std::vector<migraphx::shape>& inputs) const
{
migraphx::check_shapes{inputs, *this}.has(0);
return s;
}
migraphx::argument compute(migraphx::context&,
const migraphx::shape& output_shape,
const std::vector<migraphx::argument>&) const
{
return {output_shape};
}
};
// allocation model that has no out params
struct allocation_no_out_model
{
std::string name() const { return "allocate_no_out"; }
migraphx::operation allocate(const migraphx::shape& s) const
{
return migraphx::make_op(name(), {{"shape", to_value(s)}});
}
migraphx::operation preallocate(const migraphx::shape&, const std::string&) const { return {}; }
std::string copy() const { return {}; }
bool needs_out_params() const { return false; }
};
// allocation model with out params
struct allocation_with_out_model
{
std::string name() const { return "allocate_with_out"; }
migraphx::operation allocate(const migraphx::shape& s) const
{
return migraphx::make_op(name(), {{"shape", to_value(s)}});
}
migraphx::operation preallocate(const migraphx::shape&, const std::string&) const { return {}; }
std::string copy() const { return {}; }
bool needs_out_params() const { return true; }
};
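// run replace_allocate with the given allocation model, then clean up with dead_code_elimination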
void run_pass(migraphx::module& m, migraphx::allocation_model model, bool offload_copy = false)
{
migraphx::run_passes(m,
{migraphx::replace_allocate{std::move(model), offload_copy},
migraphx::dead_code_elimination{}});
}
void run_pass(migraphx::program& p, migraphx::allocation_model model, bool offload_copy = false)
{
migraphx::run_passes(p,
{migraphx::replace_allocate{std::move(model), offload_copy},
migraphx::dead_code_elimination{}});
}
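// build a module where a generic "allocate" instruction feeds a pass_op on two parameters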
migraphx::module create_simple_program()
{
migraphx::module m;
migraphx::shape s{migraphx::shape::float_type, {5}};
auto x = m.add_parameter("x", s);
auto y = m.add_parameter("y", s);
auto alloc =
m.add_instruction(migraphx::make_op("allocate", {{"shape", migraphx::to_value(s)}}));
m.add_instruction(pass_op{}, alloc, x, y);
return m;
}
TEST_CASE(allocate_no_out)
{
migraphx::module m = create_simple_program();
run_pass(m, allocation_no_out_model{});
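// without out params, the generic allocate should be lowered to the model's allocate_no_out op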
EXPECT(std::any_of(m.begin(), m.end(), [](const migraphx::instruction& ins) {
return migraphx::contains(ins.name(), "allocate_no_out");
}));
}
TEST_CASE(allocate_with_out_param)
{
migraphx::module m = create_simple_program();
run_pass(m, allocation_with_out_model{});
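// with out params, the allocation feeding the final result is expected to become an output parameter, so no allocate instruction should remain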
EXPECT(std::none_of(m.begin(), m.end(), [](const migraphx::instruction& ins) {
return migraphx::contains(ins.name(), "allocate");
}));
}
TEST_CASE(allocate_with_out_return)
{
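// same as above, but with an explicit return; the allocation behind the returned value should still be replaced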
migraphx::module m = create_simple_program();
m.add_return({std::prev(m.end())});
run_pass(m, allocation_with_out_model{});
EXPECT(std::none_of(m.begin(), m.end(), [](const migraphx::instruction& ins) {
return migraphx::contains(ins.name(), "allocate");
}));
}
TEST_CASE(allocate_with_out_no_params)
{
migraphx::module m;
migraphx::shape s{migraphx::shape::float_type, {5}};
auto x = m.add_parameter("x", s);
auto y = m.add_parameter("y", s);
auto z = m.add_parameter("z", s);
auto alloc =
m.add_instruction(migraphx::make_op("allocate", {{"shape", migraphx::to_value(s)}}));
auto pass1 = m.add_instruction(pass_op{}, alloc, x, y);
auto alloc2 =
m.add_instruction(migraphx::make_op("allocate", {{"shape", migraphx::to_value(s)}}));
m.add_instruction(pass_op{}, alloc2, z, pass1);
run_pass(m, allocation_with_out_model{});
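// the first allocation feeds an intermediate value rather than the module output, so presumably it cannot become an output parameter and an allocate_with_out op should remain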
EXPECT(std::any_of(m.begin(), m.end(), [](const migraphx::instruction& ins) {
return migraphx::contains(ins.name(), "allocate_with_out");
}));
}
TEST_CASE(if_allocate)
{
migraphx::program p;
auto* mm = p.get_main_module();
migraphx::shape cond_s{migraphx::shape::bool_type};
auto cond = mm->add_parameter("cond", cond_s);
migraphx::shape s{migraphx::shape::float_type, {5}};
auto x = mm->add_parameter("x", s);
auto y = mm->add_parameter("y", s);
auto* then_mod = p.create_module("If_0_if");
auto alloc = then_mod->add_instruction(
migraphx::make_op("allocate", {{"shape", migraphx::to_value(s)}}));
auto a1 = then_mod->add_instruction(pass_op{}, alloc, x);
then_mod->add_return({a1});
auto* else_mod = p.create_module("If_0_else");
auto alloc1 = else_mod->add_instruction(
migraphx::make_op("allocate", {{"shape", migraphx::to_value(s)}}));
auto a2 = else_mod->add_instruction(pass_op{}, alloc1, y);
else_mod->add_return({a2});
mm->add_instruction(migraphx::make_op("if"), {cond}, {then_mod, else_mod});
run_pass(p, allocation_with_out_model{});
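// the allocation for the if's result is expected to surface in the main module as an allocate_with_out instruction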
EXPECT(std::any_of(mm->begin(), mm->end(), [](const migraphx::instruction& ins) {
return migraphx::contains(ins.name(), "allocate_with_out");
}));
}
int main(int argc, const char* argv[]) { test::run(argc, argv); }
@@ -471,6 +471,15 @@ def relu6_test(g1):
tf.nn.relu6(g1_input, 'relu6')
@tf_test
def relu6_mismatch_test(g1):
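# like relu6_test, but with a float16 input to exercise dtype mismatch handling in the parser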
with g1.as_default():
g1_input = tf.compat.v1.placeholder(tf.float16,
shape=(1, 3, 13, 37),
name='0')
tf.nn.relu6(g1_input, 'relu6')
@tf_test
def reshape_test(g1):
with g1.as_default():
@@ -676,6 +685,7 @@ if __name__ == '__main__':
pow_test()
relu_test()
relu6_test()
relu6_mismatch_test()
reshape_test()
rsqrt_test()
shape_test()
......
(binary file relu6_mismatch_test.pb: serialized TF graph with a float16 Placeholder "0" feeding a Relu6 node "relu6")
@@ -706,6 +706,31 @@ TEST_CASE(relu6_test)
EXPECT(p == prog);
}
TEST_CASE(relu6_mismatch_test)
{
migraphx::program p;
auto* mm = p.get_main_module();
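// expected graph: convert the half input to float, then clip to [0, 6] using broadcast float literals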
std::vector<size_t> input_lens{1, 3, 13, 37};
auto l0 = mm->add_parameter("0", migraphx::shape{migraphx::shape::half_type, input_lens});
auto min_val = mm->add_literal(0.0f);
auto max_val = mm->add_literal(6.0f);
auto l0_convert = mm->add_instruction(
migraphx::make_op("convert", {{"target_type", migraphx::shape::float_type}}), l0);
min_val = mm->add_instruction(migraphx::make_op("multibroadcast", {{"out_lens", input_lens}}),
min_val);
max_val = mm->add_instruction(migraphx::make_op("multibroadcast", {{"out_lens", input_lens}}),
max_val);
mm->add_instruction(migraphx::make_op("clip"), l0_convert, min_val, max_val);
auto prog = optimize_tf("relu6_mismatch_test.pb", false);
EXPECT(p == prog);
}
TEST_CASE(reshape_test)
{
migraphx::program p;
......
@@ -28,6 +28,8 @@ struct allocation_model
operation allocate(const shape& s) const;
/// Create a preallocated operator for the given shape
operation preallocate(const shape& s, const std::string& id) const;
/// Check whether output parameters need to be inserted
bool needs_out_params() const;
};
#else
@@ -37,7 +39,8 @@ interface('allocation_model',
virtual('name', returns='std::string', const=True),
virtual('copy', returns='std::string', const=True),
virtual('allocate', s='const shape&', returns='operation', const=True),
virtual('preallocate', s='const shape&', id='std::string', returns='operation', const=True)
virtual('preallocate', s='const shape&', id='std::string', returns='operation', const=True),
virtual('needs_out_params', returns='bool', const=True)
)
%>
......