gaoqiong / MIGraphX

Commit 92d281a2
Authored Jun 19, 2023 by Artur Wojcik
Parent: 7589b47b

dnnl

Showing 1 changed file with 113 additions and 76 deletions:

src/targets/cpu/include/migraphx/cpu/dnnl.hpp  +113  -76
@@ -95,7 +95,7 @@ template <class Derived, class Primitive>
struct dnnl_op : auto_register_op<Derived>
{
    std::vector<post_op> post_ops;
    std::function<argument(context& ctx, const std::vector<argument>& args)> execute;
    std::function<argument(context&, const std::vector<argument>&)> execute;
    template <class Self, class F>
    static auto reflect_base(Self& self, F f)
@@ -284,7 +284,7 @@ struct dnnl_op : auto_register_op<Derived>
    std::ptrdiff_t output_alias(const std::vector<shape>& shapes) const
    {
        return shapes.size() - 1;
        return static_cast<std::ptrdiff_t>(shapes.size() - 1);
    }
    value compile(context&, const shape& output_shape, std::vector<shape> inputs)
    {
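Note on the hunk above (rationale inferred, not stated in the commit): std::vector::size() returns the unsigned std::size_t, so returning shapes.size() - 1 from a function declared to return the signed std::ptrdiff_t is an implicit conversion that -Wconversion and clang-tidy typically flag; the added static_cast makes that conversion explicit. A minimal standalone sketch of the same pattern (names are illustrative, not MIGraphX code):

// Illustrative only: why an explicit cast is used when returning an index
// derived from vector::size() as a signed std::ptrdiff_t.
#include <cstddef>
#include <iostream>
#include <vector>

std::ptrdiff_t last_index(const std::vector<int>& v)
{
    // size() is std::size_t (unsigned); cast states the signed result on purpose.
    return static_cast<std::ptrdiff_t>(v.size() - 1);
}

int main()
{
    std::vector<int> v{1, 2, 3};
    std::cout << last_index(v) << '\n'; // prints 2
}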
@@ -300,16 +300,60 @@ struct dnnl_op : auto_register_op<Derived>
    {
        // Compensate for allocation
        inputs.pop_back();
        const auto& self = static_cast<const Derived&>(*this);
        auto name        = self.name();
        auto md          = to_memory_desc(output_shape, inputs);
        auto prim        = get_primitive(md);
        auto arg_lookup  = create_arg_map(inputs.size());
#ifndef NDEBUG
        auto prim_attr = get_primitive_attr(md);
        // NOLINTNEXTLINE
        execute = std::bind(&dnnl_op::internal,
                            this,
                            output_shape,
                            inputs,
                            md,
                            prim,
                            arg_lookup,
                            std::placeholders::_1,
                            std::placeholders::_2);
#else
        // NOLINTNEXTLINE
        execute = std::bind(&dnnl_op::internal,
                            this,
                            md,
                            prim,
                            arg_lookup,
                            std::placeholders::_1,
                            std::placeholders::_2);
#endif
        execute = [=](context&, const std::vector<argument>& args) {
    }
    std::vector<shape> trim_post_op_inputs(const std::vector<shape>& inputs) const
    {
        auto prim_input_size = inputs.size() - this->get_extra_post_op_args();
        return {inputs.begin(), inputs.begin() + prim_input_size};
    }
    private:
#ifndef NDEBUG
    argument internal(const shape& output_shape,
                      const std::vector<shape>& inputs,
                      std::unordered_map<int, dnnl::memory::desc> md,
                      Primitive prim,
                      std::vector<int> arg_lookup,
                      context&,
                      const std::vector<argument>& args)
#else
    argument internal(std::unordered_map<int, dnnl::memory::desc> md,
                      Primitive prim,
                      std::vector<int> arg_lookup,
                      context&,
                      const std::vector<argument>& args)
#endif
    {
#ifndef NDEBUG
        const auto& self = static_cast<const Derived&>(*this);
        auto name        = self.name();
        auto prim_attr   = get_primitive_attr(md);
        // Check that the memory descriptors have not changed
        auto debug_args = args;
        debug_args.pop_back();
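Note on the hunk above: the capturing lambda previously assigned to execute is replaced with std::bind(&dnnl_op::internal, ...), which binds this plus the state computed in compile() and leaves the two call-time parameters to std::placeholders::_1/_2, so the result still matches the std::function<argument(context&, const std::vector<argument>&)> member; in release builds (NDEBUG defined) the debug-only output_shape and inputs arguments are simply not bound. A minimal standalone sketch of this pattern, using stand-in types rather than MIGraphX's own (context, argument, and the member names below are illustrative):

// Bind a member function so the result fits a std::function member,
// forwarding the call-time arguments through std::placeholders.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct context { };   // stand-in for a backend context type
struct argument       // stand-in for a tensor-like argument type
{
    std::string data;
};

struct op
{
    std::function<argument(context&, const std::vector<argument>&)> execute;

    void compile(std::string tag)
    {
        // Bind 'this' and the compile-time state (tag); leave the two
        // call-time parameters as placeholders _1 and _2.
        execute = std::bind(&op::internal,
                            this,
                            std::move(tag),
                            std::placeholders::_1,
                            std::placeholders::_2);
    }

    private:
    argument internal(std::string tag, context&, const std::vector<argument>& args)
    {
        std::cout << tag << ": " << args.size() << " argument(s)\n";
        return args.back();
    }
};

int main()
{
    op o;
    o.compile("dnnl_op");
    context ctx;
    std::vector<argument> args{{"input"}, {"output"}};
    argument result = o.execute(ctx, args); // forwards to op::internal
    std::cout << result.data << '\n';
}

Like the original [=] capture, std::bind stores copies of the bound state, so the callable keeps its own md/prim/arg_lookup equivalents alive for later execution.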
@@ -332,8 +376,7 @@ struct dnnl_op : auto_register_op<Derived>
        {
            auto arg  = j + prim_input_size;
            auto kind = pos.kind(i);
            std::string mesg =
                "Post op " + std::to_string(i) + "@" + std::to_string(arg) + ": ";
            std::string mesg = "Post op " + std::to_string(i) + "@" + std::to_string(arg) + ": ";
            try
            {
                dnnl::algorithm algo;
@@ -345,8 +388,8 @@ struct dnnl_op : auto_register_op<Derived>
            {
                pos.get_params_binary(i, algo, mdesc);
                if(mdesc != md.at(arg_lookup.at(arg)))
                    MIGRAPHX_THROW(mesg + "Memory descriptor doesn't match for binary post op");
                    MIGRAPHX_THROW(mesg + "Memory descriptor doesn't match for binary "
                                          "post op");
                j++;
            }
            else if(kind == dnnl::primitive::kind::eltwise)
@@ -379,12 +422,6 @@ struct dnnl_op : auto_register_op<Derived>
                m[arg_lookup[i]] = to_dnnl_memory(md.at(arg_lookup[i]), args[i]);
            prim.execute(get_dnnl_context().stream, m);
            return args.back();
        };
    }
    std::vector<shape> trim_post_op_inputs(const std::vector<shape>& inputs) const
    {
        auto prim_input_size = inputs.size() - this->get_extra_post_op_args();
        return {inputs.begin(), inputs.begin() + prim_input_size};
    }
};
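A side note on the static_cast<const Derived&>(*this) lines added in both compile() and internal(): dnnl_op is a CRTP base (it is parameterized on Derived and inherits auto_register_op<Derived>), so this downcast is how the base template reaches Derived::name(). A minimal standalone sketch of that pattern (class and member names are illustrative, not MIGraphX's):

// CRTP: the base recovers the derived type to call its members.
#include <iostream>
#include <string>

template <class Derived>
struct op_base
{
    std::string describe() const
    {
        // Safe because Derived inherits from op_base<Derived>.
        const auto& self = static_cast<const Derived&>(*this);
        return "op: " + self.name();
    }
};

struct my_convolution : op_base<my_convolution>
{
    std::string name() const { return "convolution"; }
};

int main()
{
    my_convolution conv;
    std::cout << conv.describe() << '\n'; // prints "op: convolution"
}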