gaoqiong / MIGraphX

Commit e41908ad, authored Feb 04, 2019 by Shucai Xiao
Parent: 22f8a479

    clang format.

Showing 2 changed files with 27 additions and 38 deletions:

    src/rewrite_rnn.cpp   (+8, -9)
    test/cpu_ops_test.cpp (+19, -29)
src/rewrite_rnn.cpp
@@ -29,7 +29,7 @@ void rewrite_rnn::apply(program& prog) const
        migraphx::shape ih_shape{type, {1, batch_size, hidden_size}};
        std::vector<float> data(ih_shape.elements(), 0);

        auto actv_funcs = compute_actv_funcs(ins);
        auto rnn_op     = any_cast<op::rnn>(ins->get_operator());
        op::rnn::rnn_direction_t dicrt = rnn_op.direction;
        if(dicrt == op::rnn::bidirectional)
@@ -135,8 +135,8 @@ void rewrite_rnn::apply(program& prog) const
            ih = prog.add_literal(migraphx::literal{ih_shape, data});
        }

        auto ret = rnn_cell(
            is_forward, prog, ins, args[0], w, r, bias, ih, actv_funcs.at(0));
        auto last_output = prog.insert_instruction(ins, op::squeeze{{0}}, ret[1]);
        // following logic is to ensure the last instruction is a
@@ -264,21 +264,20 @@ std::vector<instruction_ref> rewrite_rnn::rnn_cell(bool is_forward,
    return {hidden_out, last_out};
}

std::vector<operation> rewrite_rnn::compute_actv_funcs(instruction_ref ins) const
{
    auto rnn_op = any_cast<op::rnn>(ins->get_operator());
    // before rewrite the rnn operator, need to ensure
    // we have 2 actv funcs. If less than 2, use the
    // algorithm in parse_rnn to make 2 actv functions
    if(rnn_op.direction == op::rnn::bidirectional)
    {
        if(rnn_op.actv_funcs.size() == 0)
        {
            // default is tanh
            return {op::tanh{}, op::tanh{}};
        }
        else if(rnn_op.actv_funcs.size() == 1)
        {
            return {rnn_op.actv_funcs.at(0), rnn_op.actv_funcs.at(0)};
        }
@@ -289,7 +288,7 @@ std::vector<operation> rewrite_rnn::compute_actv_funcs(instruction_ref ins) const
    }
    else
    {
        if(rnn_op.actv_funcs.size() == 0)
        {
            // default is tanh
            return {op::tanh{}};
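Taken together, these hunks implement a simple padding rule: a bidirectional RNN must end up with one activation function per direction, a unidirectional RNN with a single function, tanh is the fallback, and a single user-supplied function is reused for the second direction. The self-contained sketch below illustrates that rule outside of MIGraphX; std::string stands in for migraphx::operation, the function name fill_actv_funcs is made up for the illustration, and the "two or more supplied" branch falls outside the visible hunks and is assumed.

    #include <iostream>
    #include <string>
    #include <vector>

    // Illustration of the padding rule in rewrite_rnn::compute_actv_funcs (not library code):
    // bidirectional needs two activation functions, unidirectional needs one, default is tanh,
    // and a single user-supplied function is reused for the second direction.
    std::vector<std::string> fill_actv_funcs(bool bidirectional,
                                             const std::vector<std::string>& user_funcs)
    {
        if(bidirectional)
        {
            if(user_funcs.empty())
                return {"tanh", "tanh"};
            if(user_funcs.size() == 1)
                return {user_funcs.front(), user_funcs.front()};
            return {user_funcs.at(0), user_funcs.at(1)}; // assumed: keep the first two
        }
        if(user_funcs.empty())
            return {"tanh"};
        return {user_funcs.front()};
    }

    int main()
    {
        for(const auto& f : fill_actv_funcs(true, {"sigmoid"})) // prints "sigmoid" twice
            std::cout << f << '\n';
    }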
test/cpu_ops_test.cpp
@@ -1458,10 +1458,7 @@ TEST_CASE(rnn_forward)
    auto und    = p.add_instruction(migraphx::op::undefined{});
    auto out_hs = p.add_instruction(
        migraphx::op::rnn{hidden_size, {}, migraphx::op::rnn::forward, clip},
        seq,
        w,
        r,
@@ -1598,10 +1595,7 @@ TEST_CASE(rnn_reverse)
    auto und    = p.add_instruction(migraphx::op::undefined{});
    auto out_hs = p.add_instruction(
        migraphx::op::rnn{hidden_size, {}, migraphx::op::rnn::reverse, clip},
        seq,
        w,
        r,
@@ -1723,16 +1717,14 @@ TEST_CASE(rnn_bidirectional)
    auto bias = p.add_literal(migraphx::literal{b_shape, bias_data});
    auto und  = p.add_instruction(migraphx::op::undefined{});
    p.add_instruction(
        migraphx::op::rnn{hidden_size, {}, migraphx::op::rnn::bidirectional, clip},
        seq,
        w,
        r,
        bias,
        und,
        ih);
    p.compile(migraphx::cpu::target{});
    auto hs_concat = p.eval({});
    std::vector<float> hs_data;
@@ -1774,17 +1766,15 @@ TEST_CASE(rnn_bidirectional)
    auto bias = p.add_literal(migraphx::literal{b_shape, bias_data});
    auto und  = p.add_instruction(migraphx::op::undefined{});
    auto out_hs = p.add_instruction(
        migraphx::op::rnn{
            hidden_size, {migraphx::op::tanh{}}, migraphx::op::rnn::bidirectional, clip},
        seq,
        w,
        r,
        bias,
        und,
        ih);
    p.add_instruction(migraphx::op::rnn_last_output{}, out_hs);
    p.compile(migraphx::cpu::target{});
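All four test hunks follow the same build, compile, and run pattern: construct a migraphx::program, add literals for the sequence, weights, bias, and initial hidden state, add the rnn instruction (optionally followed by rnn_last_output), compile for the CPU target, and evaluate. The sketch below strings those visible pieces into one minimal standalone flow; the header paths, the float_type shape enum, the concrete tensor sizes, and the visit-based readback at the end are assumptions based on MIGraphX usage of that era rather than anything shown in this diff.

    // Assumed header locations for the circa-2019 MIGraphX tree.
    #include <migraphx/program.hpp>
    #include <migraphx/literal.hpp>
    #include <migraphx/operators.hpp>
    #include <migraphx/cpu/target.hpp>
    #include <vector>

    int main()
    {
        std::size_t hidden_size = 4; // made-up sizes: seq_len=1, batch=2, input_size=3
        float clip              = 0.0f;
        migraphx::program p;

        // ONNX-style RNN input shapes: {num_directions, ...} with num_directions = 1 (forward).
        migraphx::shape in_shape{migraphx::shape::float_type, {1, 2, 3}};
        migraphx::shape w_shape{migraphx::shape::float_type, {1, hidden_size, 3}};
        migraphx::shape r_shape{migraphx::shape::float_type, {1, hidden_size, hidden_size}};
        migraphx::shape b_shape{migraphx::shape::float_type, {1, 2 * hidden_size}};
        migraphx::shape ih_shape{migraphx::shape::float_type, {1, 2, hidden_size}};

        auto seq  = p.add_literal(migraphx::literal{in_shape, std::vector<float>(in_shape.elements(), 0.1f)});
        auto w    = p.add_literal(migraphx::literal{w_shape, std::vector<float>(w_shape.elements(), 0.2f)});
        auto r    = p.add_literal(migraphx::literal{r_shape, std::vector<float>(r_shape.elements(), 0.3f)});
        auto bias = p.add_literal(migraphx::literal{b_shape, std::vector<float>(b_shape.elements(), 0.0f)});
        auto ih   = p.add_literal(migraphx::literal{ih_shape, std::vector<float>(ih_shape.elements(), 0.0f)});
        auto und  = p.add_instruction(migraphx::op::undefined{});

        auto out_hs = p.add_instruction(
            migraphx::op::rnn{hidden_size, {}, migraphx::op::rnn::forward, clip},
            seq, w, r, bias, und, ih);
        p.add_instruction(migraphx::op::rnn_last_output{}, out_hs);

        p.compile(migraphx::cpu::target{});
        auto result = p.eval({});

        std::vector<float> last_hs;
        result.visit([&](auto output) { last_hs.assign(output.begin(), output.end()); }); // assumed readback
        return last_hs.empty() ? 1 : 0;
    }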