gaoqiong / MIGraphX, commit 2a0ff223

Add return

Authored Nov 24, 2021 by Paul
Parent: 1ac17a13
Showing 2 changed files with 25 additions and 4 deletions.
src/targets/gpu/mlir.cpp: +19 -2
test/gpu/mlir.cpp: +6 -2
src/targets/gpu/mlir.cpp
@@ -396,6 +396,23 @@ struct mlir_program
         return result;
     }

+    static std::string get_name(instruction_ref ins)
+    {
+        if(ins->name() == "@return")
+            return "std.return";
+        return "migraphx." + ins->name();
+    }
+
+    static shape get_shape(instruction_ref ins)
+    {
+        if(ins->name() == "@return")
+        {
+            assert(ins->inputs().size() == 1);
+            return ins->inputs().front()->get_shape();
+        }
+        return ins->get_shape();
+    }
+
     void parse(const module& m)
     {
         auto mbody = mlirModuleGetBody(mmodule.get());
@@ -405,10 +422,10 @@ struct mlir_program
         {
             if(ins->name() == "@param")
                 continue;
-            auto name = "migraphx." + ins->name();
+            auto name = get_name(ins);
             auto ops  = create_operation_state(name);
             ops.add_attribute_value(ins->get_operator().to_value());
-            ops.add_results({ins->get_shape()});
+            ops.add_results({get_shape(ins)});
             std::vector<MlirValue> inputs;
             transform(
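For context on the hunks above: the new get_name helper maps MIGraphX's internal "@return" instruction onto MLIR's std.return op, while every other operator keeps the "migraphx." dialect prefix, and get_shape reports the shape of the value being returned (the single input of "@return") rather than the shape of the "@return" instruction itself. The snippet below is a minimal, self-contained sketch of just the name-mapping rule, not code from this commit; toy_instruction is a hypothetical stand-in for migraphx::instruction_ref.

    // Sketch (not from the commit) of the naming rule that get_name introduces:
    // "@return" is emitted as MLIR's std.return, every other MIGraphX operator
    // keeps the "migraphx." dialect prefix.
    #include <cassert>
    #include <iostream>
    #include <string>

    struct toy_instruction
    {
        std::string op_name; // e.g. "convolution", "relu", "@return"
    };

    static std::string to_mlir_op_name(const toy_instruction& ins)
    {
        if(ins.op_name == "@return")
            return "std.return";
        return "migraphx." + ins.op_name;
    }

    int main()
    {
        assert(to_mlir_op_name({"convolution"}) == "migraphx.convolution");
        assert(to_mlir_op_name({"relu"}) == "migraphx.relu");
        assert(to_mlir_op_name({"@return"}) == "std.return");
        std::cout << "name mapping ok\n";
        return 0;
    }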
test/gpu/mlir.cpp
@@ -35,13 +35,15 @@ module {
 func @main(%arg0: tensor<1x8x4x4xf32>, %arg1: tensor<2x8x3x3xf32>) -> tensor<1x2x2x2xf32> {
     %0 = "migraphx.convolution"(%arg0, %arg1) {dilation = [1 : si64, 1 : si64], group = 1 : si64, padding = [0 : si64, 0 : si64], padding_mode = 0 : si64, stride = [1 : si64, 1 : si64]} : (tensor<1x8x4x4xf32>, tensor<2x8x3x3xf32>) -> tensor<1x2x2x2xf32>
+    %1 = return %0 : tensor<1x2x2x2xf32>
   }
 }
 )__migraphx__";
     migraphx::module m;
     auto x = m.add_parameter("x", {migraphx::shape::float_type, {1, 8, 4, 4}});
     auto w = m.add_parameter("w", {migraphx::shape::float_type, {2, 8, 3, 3}});
-    m.add_instruction(migraphx::make_op("convolution"), x, w);
+    auto conv = m.add_instruction(migraphx::make_op("convolution"), x, w);
+    m.add_return({conv});
     auto s = migraphx::gpu::dump_mlir(m);
     // Skip test if MLIR is not enabled
     if(s.empty())
@@ -57,6 +59,7 @@ module {
     %0 = "migraphx.convolution"(%arg0, %arg1) {dilation = [1 : si64, 1 : si64], group = 1 : si64, padding = [0 : si64, 0 : si64], padding_mode = 0 : si64, stride = [1 : si64, 1 : si64]} : (tensor<1x8x4x4xf32>, tensor<2x8x3x3xf32>) -> tensor<1x2x2x2xf32>
     %1 = "migraphx.add"(%0, %arg2) : (tensor<1x2x2x2xf32>, tensor<1x2x2x2xf32>) -> tensor<1x2x2x2xf32>
     %2 = "migraphx.relu"(%1) : (tensor<1x2x2x2xf32>) -> tensor<1x2x2x2xf32>
+    %3 = return %2 : tensor<1x2x2x2xf32>
   }
 }
 )__migraphx__";
@@ -66,7 +69,8 @@ module {
     auto b = m.add_parameter("b", {migraphx::shape::float_type, {1, 2, 2, 2}});
     auto conv = m.add_instruction(migraphx::make_op("convolution"), x, w);
     auto add  = m.add_instruction(migraphx::make_op("add"), conv, b);
-    m.add_instruction(migraphx::make_op("relu"), add);
+    auto relu = m.add_instruction(migraphx::make_op("relu"), add);
+    m.add_return({relu});
     auto s = migraphx::gpu::dump_mlir(m);
     // Skip test if MLIR is not enabled
     if(s.empty())
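One detail both tests lean on: dump_mlir appears to return an empty string when MIGraphX is built without MLIR support, and the tests skip the comparison in that case rather than fail. Below is a small, self-contained sketch of that guard pattern only; fake_dump_mlir is a hypothetical stand-in for migraphx::gpu::dump_mlir and is not part of the commit.

    // Sketch (not from the commit) of the skip-if-empty guard used in the tests:
    // an empty MLIR dump is treated as "MLIR not enabled", not as a failure.
    #include <iostream>
    #include <string>

    static std::string fake_dump_mlir(bool mlir_enabled)
    {
        return mlir_enabled ? "module { ... }" : std::string{};
    }

    int main()
    {
        auto s = fake_dump_mlir(false);
        if(s.empty())
        {
            std::cout << "MLIR not enabled; skipping check\n";
            return 0;
        }
        // With MLIR enabled, the tests compare s against the expected module text.
        std::cout << s << "\n";
        return 0;
    }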