gaoqiong / MIGraphX · Commit dc42e71c

Authored Apr 03, 2019 by Shucai Xiao

merge change from develop branch

Parents: 80ee3402, 900bad8b

Showing 6 changed files with 204 additions and 0 deletions (+204 -0)
src/CMakeLists.txt                        +1   -0
src/eliminate_pad.cpp                     +58  -0
src/include/migraphx/eliminate_pad.hpp    +30  -0
src/include/migraphx/op/pad.hpp           +7   -0
src/targets/gpu/target.cpp                +3   -0
test/eliminate_pad_test.cpp               +105 -0
src/CMakeLists.txt

@@ -11,6 +11,7 @@ add_library(migraphx
     eliminate_contiguous.cpp
     eliminate_concat.cpp
     eliminate_identity.cpp
+    eliminate_pad.cpp
     fwd_conv_batchnorm_rewrite.cpp
     rewrite_rnn.cpp
     env.cpp
src/eliminate_pad.cpp (new file, 0 → 100644)

#include <migraphx/eliminate_pad.hpp>
#include <migraphx/program.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/op/convolution.hpp>
#include <migraphx/op/im2col.hpp>
#include <migraphx/op/pooling.hpp>
#include <migraphx/op/pad.hpp>
#include <migraphx/iterator_for.hpp>
#include <migraphx/stringutils.hpp>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {

void eliminate_pad::apply(program& p) const
{
    for(auto ins : iterator_for(p))
    {
        const std::string& op_name = ins->name();
        if(op_name != "convolution" and op_name != "im2col" and op_name != "pooling")
            continue;
        auto input = ins->inputs().front();
        if(input->name() != "pad")
            continue;

        if(op_name == "convolution")
            update_op(op::convolution{}, input, ins, p);
        else if(op_name == "im2col")
            update_op(op::im2col{}, input, ins, p);
        else if(op_name == "pooling")
            update_op(op::pooling{}, input, ins, p);
    }
}

template <class T>
void eliminate_pad::update_op(T,
                              const instruction_ref& input,
                              const instruction_ref& ins,
                              program& p) const
{
    auto pad_op = any_cast<op::pad>(input->get_operator());
    if(!pad_op.symmetric())
        return;

    std::vector<int64_t> pads = pad_op.pads;
    std::array<size_t, 2> new_pads{static_cast<size_t>(pads[2]), static_cast<size_t>(pads[3])};

    T op = any_cast<T>(ins->get_operator());
    if(op.padding_mode != op::padding_mode_t::default_)
        return;
    op.padding = new_pads;

    std::vector<instruction_ref> new_inputs{ins->inputs()};
    new_inputs.front() = input->inputs().front();
    p.replace_instruction(ins, op, new_inputs);
}

} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
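A minimal usage sketch (not part of the commit) of what this pass does to a program: a symmetric pad feeding a convolution is folded into the convolution's padding attribute, and the now-unused pad is left for dead_code_elimination to drop. The shapes and parameter names below are illustrative assumptions, not taken from the diff.

#include <migraphx/program.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/eliminate_pad.hpp>
#include <migraphx/dead_code_elimination.hpp>
#include <migraphx/op/pad.hpp>
#include <migraphx/op/convolution.hpp>

void fold_pad_example()
{
    migraphx::program p;
    migraphx::shape x_shape{migraphx::shape::float_type, {1, 3, 32, 32}};
    migraphx::shape w_shape{migraphx::shape::float_type, {8, 3, 3, 3}};
    auto x = p.add_parameter("x", x_shape);
    auto w = p.add_parameter("w", w_shape);

    // Explicit symmetric pad of 1 on H and W: {N,C,H,W low pads, N,C,H,W high pads}
    auto padded = p.add_instruction(migraphx::op::pad{{0, 0, 1, 1, 0, 0, 1, 1}}, x);
    p.add_instruction(migraphx::op::convolution{}, padded, w);

    // After the pass, the convolution reads x directly with padding = {1, 1};
    // the orphaned pad instruction is then removed by dead_code_elimination.
    migraphx::eliminate_pad{}.apply(p);
    migraphx::dead_code_elimination{}.apply(p);
}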
src/include/migraphx/eliminate_pad.hpp (new file, 0 → 100644)

#ifndef MIGRAPHX_GUARD_RTGLIB_ELIMINATE_PAD_HPP
#define MIGRAPHX_GUARD_RTGLIB_ELIMINATE_PAD_HPP

#include <string>
#include <vector>
#include <array>
#include <migraphx/instruction_ref.hpp>
#include <migraphx/config.hpp>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {

struct program;

/**
 * Remove pads if they can be written as an
 * attribute to another op (im2col, convolution, pooling)
 */
struct eliminate_pad
{
    std::string name() const { return "eliminate_pad"; }
    void apply(program& p) const;

    template <class T>
    void update_op(T, const instruction_ref& input, const instruction_ref& ins, program& p) const;
};

} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx

#endif
src/include/migraphx/op/pad.hpp

@@ -50,6 +50,13 @@ struct pad
         shape s{inputs.front().type(), rdims};
         return s;
     }
+
+    bool symmetric() const
+    {
+        std::size_t num_dims = pads.size() / 2;
+        return std::equal(
+            pads.begin(), pads.begin() + num_dims, pads.begin() + num_dims, pads.end());
+    }
 };
 } // namespace op
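A quick sketch (not part of the diff) of what symmetric() classifies: the first half of pads holds the leading (low-side) padding per dimension and the second half the trailing (high-side) padding, and eliminate_pad only folds pads whose two halves match. The standalone main() below is an illustration, assuming the pad values used in the new test file.

#include <cassert>
#include <migraphx/op/pad.hpp>

int main()
{
    // {N_lo, C_lo, H_lo, W_lo, N_hi, C_hi, H_hi, W_hi}: lows equal highs -> symmetric
    migraphx::op::pad sym{{0, 0, 1, 1, 0, 0, 1, 1}};
    // padding only on the high side of H and W -> asymmetric, left untouched by eliminate_pad
    migraphx::op::pad asym{{0, 0, 0, 0, 0, 0, 2, 2}};
    assert(sym.symmetric());
    assert(!asym.symmetric());
}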
src/targets/gpu/target.cpp

@@ -20,6 +20,7 @@
 #include <migraphx/eliminate_identity.hpp>
 #include <migraphx/gpu/concat_gpu_opt.hpp>
 #include <migraphx/gpu/schedule_model.hpp>
+#include <migraphx/eliminate_pad.hpp>
 #include <migraphx/schedule.hpp>

 namespace migraphx {

@@ -36,6 +37,8 @@ std::vector<pass> target::get_passes(migraphx::context& gctx) const
 {
     dead_code_elimination{},
     eliminate_identity{},
+    eliminate_pad{},
+    dead_code_elimination{},
     fwd_conv_batchnorm_rewrite{},
     dead_code_elimination{},
     rewrite_rnn{},
test/eliminate_pad_test.cpp (new file, 0 → 100644)

#include <migraphx/dead_code_elimination.hpp>
#include <migraphx/eliminate_pad.hpp>
#include <migraphx/instruction.hpp>
#include <basic_ops.hpp>
#include <migraphx/op/operators.hpp>
#include <test.hpp>

struct eliminate_pad_target
{
    std::string name() const { return "eliminate_pad"; }
    std::vector<migraphx::pass> get_passes(migraphx::context&) const
    {
        return {migraphx::eliminate_pad{}, migraphx::dead_code_elimination{}};
    }
    migraphx::context get_context() const { return {}; }
};

migraphx::instruction_ref
create_im2col(migraphx::instruction_ref& l_img, size_t channels, migraphx::program& p)
{
    size_t f[2] = {1, 1};
    std::vector<int32_t> weights(channels * f[0] * f[1]);
    migraphx::shape s_weights{migraphx::shape::int32_type, {1, channels, f[0], f[1]}};
    auto l_weights = p.add_literal(migraphx::literal{s_weights, weights});
    return p.add_instruction(migraphx::op::im2col{}, l_img, l_weights);
}

migraphx::instruction_ref
create_conv(migraphx::instruction_ref& l_img,
            size_t channels,
            migraphx::program& p,
            migraphx::op::padding_mode_t padding_mode = migraphx::op::padding_mode_t::default_)
{
    migraphx::shape s_weights{migraphx::shape::int32_type, {4, channels, 3, 3}};
    std::vector<int32_t> weights(4 * channels * 3 * 3);
    auto l_weights = p.add_literal(migraphx::literal{s_weights, weights});
    migraphx::op::convolution op;
    op.padding_mode = padding_mode;
    return p.add_instruction(op, l_img, l_weights);
}

TEST_CASE(rewrite_test)
{
    migraphx::program p;

    size_t img_dim[2] = {2, 2};
    size_t channels   = 1;
    std::vector<int32_t> input(channels * img_dim[0] * img_dim[1]);
    std::iota(input.begin(), input.end(), 0);

    migraphx::shape s_img{migraphx::shape::int32_type, {1, channels, img_dim[0], img_dim[1]}};
    auto l_img      = p.add_literal(migraphx::literal{s_img, input});
    auto padded_img = p.add_instruction(migraphx::op::pad{{0, 0, 1, 1, 0, 0, 1, 1}}, l_img);

    auto l0 = create_im2col(padded_img, channels, p);
    auto l1 = create_conv(padded_img, channels, p);
    auto l2 = p.add_instruction(migraphx::op::pooling{}, padded_img);
    p.add_instruction(migraphx::op::identity{}, l0, l1, l2);

    p.compile(eliminate_pad_target{});

    EXPECT(std::none_of(
        p.begin(), p.end(), [](const migraphx::instruction& ins) { return ins.name() == "pad"; }));
}

TEST_CASE(rewrite_test_asymmetric)
{
    migraphx::program p;

    size_t img_dim[2] = {2, 2};
    size_t channels   = 1;
    std::vector<int32_t> input(channels * img_dim[0] * img_dim[1]);
    std::iota(input.begin(), input.end(), 0);

    migraphx::shape s_img{migraphx::shape::int32_type, {1, channels, img_dim[0], img_dim[1]}};
    auto l_img      = p.add_literal(migraphx::literal{s_img, input});
    auto padded_img = p.add_instruction(migraphx::op::pad{{0, 0, 0, 0, 0, 0, 2, 2}}, l_img);

    create_im2col(padded_img, channels, p);

    p.compile(eliminate_pad_target{});

    EXPECT(std::any_of(
        p.begin(), p.end(), [](const migraphx::instruction& ins) { return ins.name() == "pad"; }));
}

TEST_CASE(rewrite_test_same_padding)
{
    migraphx::program p;

    size_t img_dim[2] = {2, 2};
    size_t channels   = 1;
    std::vector<int32_t> input(channels * img_dim[0] * img_dim[1]);
    std::iota(input.begin(), input.end(), 0);

    migraphx::shape s_img{migraphx::shape::int32_type, {1, channels, img_dim[0], img_dim[1]}};
    auto l_img      = p.add_literal(migraphx::literal{s_img, input});
    auto padded_img = p.add_instruction(migraphx::op::pad{{0, 0, 1, 1, 0, 0, 1, 1}}, l_img);

    create_conv(padded_img, channels, p, migraphx::op::padding_mode_t::same);

    p.compile(eliminate_pad_target{});

    EXPECT(std::any_of(
        p.begin(), p.end(), [](const migraphx::instruction& ins) { return ins.name() == "pad"; }));
}

int main(int argc, const char* argv[]) { test::run(argc, argv); }