"src/targets/cpu/sub.cpp" did not exist on "165d1a173e565801202cd67fd89d248028c6e475"
lowering.cpp 9.11 KB
Newer Older
1
#include <rocblas.h>
Paul's avatar
Paul committed
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
#include <migraphx/gpu/lowering.hpp>
#include <migraphx/manage_ptr.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/operators.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/shape_for_each.hpp>
#include <migraphx/gpu/miopen.hpp>
#include <migraphx/gpu/hip.hpp>
#include <migraphx/dfor.hpp>
#include <migraphx/gpu/device/contiguous.hpp>
#include <migraphx/gpu/device/add.hpp>
#include <migraphx/iterator_for.hpp>
#include <migraphx/gpu/rocblas.hpp>
#include <migraphx/gpu/context.hpp>
#include <migraphx/gpu/convolution.hpp>
#include <migraphx/gpu/contiguous.hpp>
#include <migraphx/gpu/relu.hpp>
#include <migraphx/gpu/leaky_relu.hpp>
#include <migraphx/gpu/softmax.hpp>
#include <migraphx/gpu/add.hpp>
22
#include <migraphx/gpu/sin.hpp>
23
24
25
#include <migraphx/gpu/sinh.hpp>
#include <migraphx/gpu/cosh.hpp>
#include <migraphx/gpu/tanh.hpp>
Paul's avatar
Paul committed
26
27
28
29
30
#include <migraphx/gpu/mul.hpp>
#include <migraphx/gpu/batchnorm.hpp>
#include <migraphx/gpu/pooling.hpp>
#include <migraphx/gpu/gemm.hpp>
#include <migraphx/gpu/concat.hpp>
Paul's avatar
Paul committed
31
#include <utility>
Paul's avatar
Paul committed
32

Paul's avatar
Paul committed
33
namespace migraphx {
34
inline namespace MIGRAPH_INLINE_NS {
Paul's avatar
Paul committed
35
namespace gpu {
Paul's avatar
Paul committed
36
37
38

struct miopen_apply
{
Paul's avatar
Paul committed
39
    program* prog = nullptr;
Paul's avatar
Paul committed
40
    context ctx{};
Paul's avatar
Paul committed
41

Paul's avatar
Paul committed
42
43
44
45
46
47
48
    void check_shape(shape x, instruction_ref i)
    {
        assert(x == i->get_shape());
        (void)x;
        (void)i;
    }

Paul's avatar
Paul committed
49
50
    void apply()
    {
Paul's avatar
Paul committed
51
52
        for(auto it = prog->begin(); it != prog->end(); it++)
        {
Paul's avatar
Paul committed
53
            auto s = it->get_shape();
Paul's avatar
Paul committed
54
            if(it->name() == "convolution")
Paul's avatar
Paul committed
55
            {
Paul's avatar
Paul committed
56
                check_shape(s, apply_convolution(it));
Paul's avatar
Paul committed
57
            }
Khalique's avatar
Khalique committed
58
            else if(it->name() == "relu")
Paul's avatar
Paul committed
59
            {
Khalique's avatar
Khalique committed
60
                check_shape(s, apply_relu(it));
Paul's avatar
Paul committed
61
            }
Khalique's avatar
Khalique committed
62
63
64
65
            else if(it->name() == "leaky_relu")
            {
                check_shape(s, apply_leaky_relu(it));
            }
Paul's avatar
Paul committed
66
            else if(it->name() == "pooling")
Paul's avatar
Paul committed
67
            {
Paul's avatar
Paul committed
68
                check_shape(s, apply_pooling(it));
Paul's avatar
Paul committed
69
            }
Paul's avatar
Paul committed
70
            else if(it->name() == "add")
Paul's avatar
Paul committed
71
            {
Paul's avatar
Paul committed
72
                check_shape(s, apply_add(it));
Paul's avatar
Paul committed
73
            }
74
75
76
77
            else if(it->name() == "sin")
            {
                check_shape(s, apply_sin(it));
            }
78
79
80
81
82
83
84
85
86
87
88
89
            else if(it->name() == "sinh")
            {
                check_shape(s, apply_sinh(it));
            }
            else if(it->name() == "cosh")
            {
                check_shape(s, apply_cosh(it));
            }
            else if(it->name() == "tanh")
            {
                check_shape(s, apply_tanh(it));
            }
Khalique's avatar
Khalique committed
90
91
92
93
            else if(it->name() == "mul")
            {
                check_shape(s, apply_mul(it));
            }
Shucai Xiao's avatar
Shucai Xiao committed
94
            else if(it->name() == "dot")
Paul's avatar
Paul committed
95
            {
Paul's avatar
Paul committed
96
                check_shape(s, apply_gemm(it));
Paul's avatar
Paul committed
97
            }
Paul's avatar
Paul committed
98
            else if(it->name() == "contiguous")
99
            {
Paul's avatar
Paul committed
100
                check_shape(s, apply_contiguous(it));
101
            }
102
103
104
105
            else if(it->name() == "concat")
            {
                check_shape(s, apply_concat(it));
            }
Paul's avatar
Paul committed
106
            else if(it->name() == "batch_norm_inference")
107
            {
Paul's avatar
Paul committed
108
                check_shape(s, apply_batch_norm_inference(it));
109
            }
Paul's avatar
Paul committed
110
111
112
113
            else if(it->name() == "softmax")
            {
                check_shape(s, apply_softmax(it));
            }
Paul's avatar
Paul committed
114
115
116
        }
    }

Paul's avatar
Paul committed
117
    instruction_ref insert_allocation(instruction_ref ins, const shape& s, std::string tag = "")
Paul's avatar
Paul committed
118
    {
Paul's avatar
Paul committed
119
        if(ins == --prog->end() and tag.empty())
Paul's avatar
Paul committed
120
121
122
123
124
        {
            return prog->add_parameter("output", s);
        }
        else
        {
Paul's avatar
Paul committed
125
            auto is     = prog->add_outline(s);
Paul's avatar
Paul committed
126
            auto result = prog->insert_instruction(ins, hip_allocate{std::move(tag)}, is);
Paul's avatar
Paul committed
127
128
129
130
            return result;
        }
    }

Paul's avatar
Paul committed
131
    instruction_ref apply_convolution(instruction_ref ins)
Paul's avatar
Paul committed
132
    {
wsttiger's avatar
wsttiger committed
133
        auto&& op = any_cast<op::convolution>(ins->get_operator());
Paul's avatar
Paul committed
134

Paul's avatar
Paul committed
135
        auto conv = miopen_convolution{op, make_conv(op)};
Paul's avatar
Paul committed
136
        auto ws   = conv.compile(ctx, ins->get_shape(), ins->inputs());
Paul's avatar
Paul committed
137

138
        auto workspace = insert_allocation(ins, ws, "workspace");
Paul's avatar
Paul committed
139
        auto output    = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
140

Paul's avatar
Paul committed
141
        return prog->replace_instruction(
Paul's avatar
Paul committed
142
            ins, conv, ins->inputs().at(0), ins->inputs().at(1), workspace, output);
Paul's avatar
Paul committed
143
144
    }

Paul's avatar
Paul committed
145
    instruction_ref apply_pooling(instruction_ref ins)
Paul's avatar
Paul committed
146
    {
wsttiger's avatar
wsttiger committed
147
        auto&& op   = any_cast<op::pooling>(ins->get_operator());
Paul's avatar
Paul committed
148
        auto pd     = make_pooling(op);
Paul's avatar
Paul committed
149
        auto output = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
150

Paul's avatar
Paul committed
151
        return prog->replace_instruction(
Paul's avatar
Paul committed
152
            ins, miopen_pooling{op, std::move(pd)}, ins->inputs().at(0), output);
Paul's avatar
Paul committed
153
154
    }

Khalique's avatar
Khalique committed
155
    instruction_ref apply_relu(instruction_ref ins)
Paul's avatar
Paul committed
156
    {
Khalique's avatar
Khalique committed
157
        auto ad = make_relu();
Khalique's avatar
Khalique committed
158
159
160
161

        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(
            ins, miopen_relu{std::move(ad)}, ins->inputs().at(0), output);
Paul's avatar
Paul committed
162
    }
163

Khalique's avatar
Khalique committed
164
165
166
167
    instruction_ref apply_leaky_relu(instruction_ref ins)
    {
        auto&& op = any_cast<op::leaky_relu>(ins->get_operator());
        auto ad   = make_leaky_relu(op.alpha);
168

Khalique's avatar
Khalique committed
169
170
171
172
        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(
            ins, miopen_leaky_relu{std::move(ad)}, ins->inputs().at(0), output);
    }
Paul's avatar
Paul committed
173

Paul's avatar
Paul committed
174
175
    instruction_ref apply_softmax(instruction_ref ins)
    {
wsttiger's avatar
wsttiger committed
176
        auto&& op   = any_cast<op::softmax>(ins->get_operator());
Paul's avatar
Paul committed
177
        auto output = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
178
        return prog->replace_instruction(ins, miopen_softmax{op}, ins->inputs().at(0), output);
Paul's avatar
Paul committed
179
180
    }

Paul's avatar
Paul committed
181
    instruction_ref apply_add(instruction_ref ins)
Paul's avatar
Paul committed
182
    {
Paul's avatar
Paul committed
183
        auto output = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
184
        return prog->replace_instruction(
Paul's avatar
Paul committed
185
            ins, hip_add{}, ins->inputs().at(0), ins->inputs().at(1), output);
Paul's avatar
Paul committed
186
    }
Paul's avatar
Paul committed
187

188
189
190
    instruction_ref apply_sin(instruction_ref ins)
    {
        auto output = insert_allocation(ins, ins->get_shape());
Shucai Xiao's avatar
Shucai Xiao committed
191
        return prog->replace_instruction(ins, hip_sin{}, ins->inputs().at(0), output);
192
193
    }

194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
    instruction_ref apply_sinh(instruction_ref ins)
    {
        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(ins, hip_sinh{}, ins->inputs().at(0), output);
    }

    instruction_ref apply_cosh(instruction_ref ins)
    {
        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(ins, hip_cosh{}, ins->inputs().at(0), output);
    }

    instruction_ref apply_tanh(instruction_ref ins)
    {
        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(ins, hip_tanh{}, ins->inputs().at(0), output);
    }

Khalique's avatar
Khalique committed
212
213
214
215
216
217
218
    instruction_ref apply_mul(instruction_ref ins)
    {
        auto output = insert_allocation(ins, ins->get_shape());
        return prog->replace_instruction(
            ins, hip_mul{}, ins->inputs().at(0), ins->inputs().at(1), output);
    }

Paul's avatar
Paul committed
219
    instruction_ref apply_gemm(instruction_ref ins)
Paul's avatar
Paul committed
220
    {
Shucai Xiao's avatar
Shucai Xiao committed
221
        auto&& op   = any_cast<op::dot>(ins->get_operator());
Paul's avatar
Paul committed
222
        auto output = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
223
        return prog->replace_instruction(
Paul's avatar
Paul committed
224
            ins, miopen_gemm{op}, ins->inputs().at(0), ins->inputs().at(1), output);
Paul's avatar
Paul committed
225
    }
226

Paul's avatar
Paul committed
227
    instruction_ref apply_contiguous(instruction_ref ins)
228
    {
wsttiger's avatar
wsttiger committed
229
        auto&& op   = any_cast<op::contiguous>(ins->get_operator());
Paul's avatar
Paul committed
230
        auto output = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
231
        return prog->replace_instruction(ins, miopen_contiguous{op}, ins->inputs().at(0), output);
232
    }
233

234
235
    instruction_ref apply_concat(instruction_ref ins)
    {
wsttiger's avatar
wsttiger committed
236
237
        auto&& op                         = any_cast<op::concat>(ins->get_operator());
        auto output                       = insert_allocation(ins, ins->get_shape());
238
239
240
241
242
        std::vector<instruction_ref> refs = ins->inputs();
        refs.push_back(output);
        return prog->replace_instruction(ins, hip_concat{op}, refs);
    }

Paul's avatar
Paul committed
243
    instruction_ref apply_batch_norm_inference(instruction_ref ins)
244
    {
wsttiger's avatar
wsttiger committed
245
        auto&& op       = any_cast<op::batch_norm_inference>(ins->get_operator());
Paul's avatar
Paul committed
246
        auto output     = insert_allocation(ins, ins->get_shape());
Paul's avatar
Paul committed
247
        shape old_shape = ins->inputs().at(1)->get_shape();
wsttiger's avatar
wsttiger committed
248
        std::vector<int64_t> new_shape{1, static_cast<int64_t>(old_shape.elements()), 1, 1};
wsttiger's avatar
wsttiger committed
249
        auto reshape_op = op::reshape{new_shape};
Paul's avatar
Paul committed
250
        std::vector<instruction_ref> reshapes;
Paul's avatar
Paul committed
251
252
        std::transform(ins->inputs().begin() + 1,
                       ins->inputs().end(),
Paul's avatar
Paul committed
253
254
                       std::back_inserter(reshapes),
                       [&](auto i) { return prog->insert_instruction(ins, reshape_op, i); });
Paul's avatar
Paul committed
255
        return prog->replace_instruction(ins,
Paul's avatar
Paul committed
256
                                         miopen_batch_norm_inference{op},
Paul's avatar
Paul committed
257
                                         ins->inputs().at(0),
Paul's avatar
Paul committed
258
259
260
261
262
                                         reshapes[0],
                                         reshapes[1],
                                         reshapes[2],
                                         reshapes[3],
                                         output);
263
    }
Paul's avatar
Paul committed
264
265
};

Paul's avatar
Paul committed
266
// Entry point of the lowering pass: rewrites every supported operator in `p`
// into its GPU implementation in place, using this pass's captured context.
void lowering::apply(program& p) const { miopen_apply{&p, ctx}.apply(); }
Paul's avatar
Paul committed
267
} // namespace gpu
268
} // namespace MIGRAPH_INLINE_NS
Paul's avatar
Paul committed
269
} // namespace migraphx