Unverified commit 305ae9a2 authored by Aaron Enye Shi, committed by GitHub

Merge branch 'develop' into enable-miopen-hipclang

parents 93888cf8 dcbc9255
[Binary files] Two new TensorFlow GraphDef fixtures are added alongside the test changes below: sqdiff_test.pb, with two Placeholder inputs ("0", "1") feeding a SquaredDifference node named "sqdiff", and stopgradient_test.pb, with a single Placeholder input ("0") feeding a StopGradient node named "stopgradient". The binary protobuf contents are not reproduced here.
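For reference, here is a minimal sketch of how these two fixtures could be regenerated with the TensorFlow Python API. The node names ("0", "1", "sqdiff", "stopgradient") are taken from the binary dumps summarized above, and the placeholder shapes mirror the ones asserted in the C++ tests below; the actual generator script is not part of this diff, so treat this as an assumption rather than the project's tooling.

# Sketch only (assumed workflow): regenerate sqdiff_test.pb and stopgradient_test.pb.
import tensorflow as tf

def write_fixture(build_graph, filename):
    # Build the graph in TF1-style graph mode and dump it as a binary GraphDef.
    with tf.compat.v1.Graph().as_default() as g:
        build_graph()
        tf.io.write_graph(g.as_graph_def(), ".", filename, as_text=False)

def sqdiff_graph():
    x = tf.compat.v1.placeholder(tf.float32, shape=(1, 2, 2, 3), name="0")
    y = tf.compat.v1.placeholder(tf.float32, shape=(1, 2, 2, 3), name="1")
    tf.math.squared_difference(x, y, name="sqdiff")  # SquaredDifference node

def stopgradient_graph():
    x = tf.compat.v1.placeholder(tf.float32, shape=(1, 3, 16, 16), name="0")
    tf.stop_gradient(x, name="stopgradient")  # StopGradient node

write_fixture(sqdiff_graph, "sqdiff_test.pb")
write_fixture(stopgradient_graph, "stopgradient_test.pb")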
@@ -48,6 +48,21 @@ TEST_CASE(add_bcast_test)
    EXPECT(p == prog);
}

TEST_CASE(batchmatmul_test)
{
    migraphx::program p;
    auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 2, 8, 4}});
    auto l1 = p.add_parameter("1", migraphx::shape{migraphx::shape::float_type, {1, 2, 4, 8}});
    auto trans_l0 = p.add_instruction(migraphx::op::transpose{{0, 1, 3, 2}}, l0);
    auto trans_l1 = p.add_instruction(migraphx::op::transpose{{0, 1, 3, 2}}, l1);
    p.add_instruction(migraphx::op::dot{}, trans_l0, trans_l1);
    auto prog = optimize_tf("batchmatmul_test.pb", false);
    EXPECT(p == prog);
}

TEST_CASE(batchnorm_test)
{
    float epsilon = 1.001e-5f;
@@ -351,6 +366,16 @@ TEST_CASE(reshape_test)
    EXPECT(p == prog);
}

TEST_CASE(rsqrt_test)
{
    migraphx::program p;
    auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 3, 16, 16}});
    p.add_instruction(migraphx::op::rsqrt{}, l0);
    auto prog = optimize_tf("rsqrt_test.pb", false);
    EXPECT(p == prog);
}

TEST_CASE(softmax_test)
{
    migraphx::program p;
@@ -364,6 +389,17 @@ TEST_CASE(softmax_test)
    EXPECT(p == prog);
}

TEST_CASE(sqdiff_test)
{
    migraphx::program p;
    auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 2, 2, 3}});
    auto l1 = p.add_parameter("1", migraphx::shape{migraphx::shape::float_type, {1, 2, 2, 3}});
    p.add_instruction(migraphx::op::sqdiff{}, l0, l1);
    auto prog = optimize_tf("sqdiff_test.pb", false);
    EXPECT(p == prog);
}

TEST_CASE(squeeze_test)
{
    migraphx::program p;
@@ -374,6 +410,16 @@ TEST_CASE(squeeze_test)
    EXPECT(p == prog);
}

TEST_CASE(stopgradient_test)
{
    migraphx::program p;
    auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 3, 16, 16}});
    p.add_instruction(migraphx::op::identity{}, l0);
    auto prog = optimize_tf("stopgradient_test.pb", false);
    EXPECT(p == prog);
}

TEST_CASE(stridedslice_test)
{
    migraphx::program p;
@@ -414,4 +460,16 @@ TEST_CASE(tanh_test)
    EXPECT(p == prog);
}

TEST_CASE(transpose_test)
{
    migraphx::program p;
    auto l0 = p.add_parameter("0", migraphx::shape{migraphx::shape::float_type, {1, 3, 16, 16}});
    migraphx::shape s0{migraphx::shape::int32_type, {4}};
    p.add_literal(migraphx::literal{s0, {0, 2, 3, 1}});
    p.add_instruction(migraphx::op::transpose{{0, 2, 3, 1}}, l0);
    auto prog = optimize_tf("transpose_test.pb", false);
    EXPECT(p == prog);
}

int main(int argc, const char* argv[]) { test::run(argc, argv); }
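One detail worth calling out in batchmatmul_test above: the expected program transposes the last two axes of both operands before the dot. That is the pattern a TensorFlow batched matmul with its adjoint (or transpose) flags set would lower to, so the fixture was presumably built along the lines of the hedged sketch below; the flag choice and the node name "batchmatmul" are assumptions, not something visible in this diff.

# Assumed generator for batchmatmul_test.pb: tf.linalg.matmul with adjoint_a/adjoint_b
# swaps the last two dimensions of each operand, matching the pair of
# migraphx::op::transpose{{0, 1, 3, 2}} instructions expected by the C++ test.
import tensorflow as tf

with tf.compat.v1.Graph().as_default() as g:
    a = tf.compat.v1.placeholder(tf.float32, shape=(1, 2, 8, 4), name="0")
    b = tf.compat.v1.placeholder(tf.float32, shape=(1, 2, 4, 8), name="1")
    tf.linalg.matmul(a, b, adjoint_a=True, adjoint_b=True, name="batchmatmul")
    tf.io.write_graph(g.as_graph_def(), ".", "batchmatmul_test.pb", as_text=False)

With those flags, the [1, 2, 8, 4] and [1, 2, 4, 8] inputs become [1, 2, 4, 8] and [1, 2, 8, 4] respectively, giving a [1, 2, 4, 4] product.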