"...include/git@developer.sourcefind.cn:gaoqiong/migraphx.git" did not exist on "f550da309922fb0b578edec9d8f36acea5296441"
binary.cpp 1.31 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
#include <migraphx/config.hpp>
#include <migraphx/cpu/dnnl.hpp>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
namespace cpu {

struct dnnl_binary : dnnl_op<dnnl_binary, dnnl::binary>
{
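    // Name of the DNNL binary algorithm to run (e.g. "binary_add"); converted with to_dnnl_algo when building the primitive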
    std::string algo;
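    // Reflect "algo" in addition to the fields reflected by the dnnl_op base class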
    template <class Self, class F>
    static auto reflect(Self& self, F f)
    {
        return pack_join(self.reflect_base(self, f), pack(f(self.algo, "algo")));
    }

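    // Group this op by name and algorithm (e.g. "dnnl::binary::binary_add") so different algorithms are reported separately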
    std::string group() const { return this->name() + "::" + algo; }

    std::string name() const { return "dnnl::binary"; }

    shape compute_shape(std::vector<shape> inputs) const
    {
        // The last input is the output allocation; drop it before checking shapes
        inputs.pop_back();
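        // A binary op must have exactly two inputs once any fused post-op inputs are trimmed off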
        check_shapes{this->trim_post_op_inputs(inputs), *this}.has(2);
        auto s0 = inputs.at(0);
        auto s1 = inputs.at(1);
        auto r  = s0;
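        // If the input shapes differ or are not packed, compute into a standard packed output shape instead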
        if(s0 != s1 or !s0.packed())
        {
            r = shape{s0.type(), s0.lens()};
        }
        // Call to get_primitive to make sure an algo is available
        this->get_primitive(this->to_memory_desc(r, inputs));
        return r;
    }

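    // Build the DNNL binary primitive descriptor from the algorithm and the src/dst memory descriptors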
    dnnl::binary::desc get_desc(const std::unordered_map<int, dnnl::memory::desc>& m) const
    {
        return {to_dnnl_algo(algo), m.at(DNNL_ARG_SRC_0), m.at(DNNL_ARG_SRC_1), m.at(DNNL_ARG_DST)};
    }
};

} // namespace cpu
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx