#include <google/protobuf/text_format.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <onnx.pb.h>
#include <iostream>
#include <fstream>
#include <unordered_map>
#include <functional>
#include <array>
#include <utility>
#include <vector>
#include <numeric>
#include <algorithm>
#include <iterator>
#include <cctype>

#include <migraphx/fallthrough.hpp>
#include <migraphx/program.hpp>
#include <migraphx/operators.hpp>
#include <migraphx/ranges.hpp>
#include <migraphx/stringutils.hpp> // starts_with, ends_with, to_upper
#include <migraphx/instruction.hpp>
#include <migraphx/config.hpp>

namespace migraphx {
inline namespace MIGRAPH_INLINE_NS {
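
// Fallback operator inserted for ONNX node types that have no registered
// handler; it reports itself as "unknown:<op>" and forwards the shape of its
// first input so parsing can continue.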
struct unknown
{
    std::string op;
    std::string name() const { return "unknown:" + op; }
    shape compute_shape(std::vector<shape> input) const
    {
        if(input.empty())
            return {};
        else
            return input.front();
    }
    friend std::ostream& operator<<(std::ostream& os, const unknown& x)
    {
        os << x.name();
        return os;
    }
};

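// onnx_parser translates an onnx::ModelProto into a migraphx::program.  Each
// supported ONNX operator name maps to a handler in `ops`; graph initializers
// become literals, remaining graph inputs become parameters, and nodes are
// visited recursively so operands are parsed before their users.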
struct onnx_parser
{
    using attribute_map = std::unordered_map<std::string, onnx::AttributeProto>;
    using node_map      = std::unordered_map<std::string, onnx::NodeProto>;
    using op_func = std::function<instruction_ref(attribute_map, std::vector<instruction_ref>)>;
    node_map nodes;
    std::unordered_map<std::string, instruction_ref> instructions;
    program prog    = program();
    bool is_pytorch = false;

    std::unordered_map<std::string, op_func> ops;

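    // The constructor registers one handler per supported ONNX operator:
    // add_generic_op maps a node directly onto a single migraphx op,
    // add_broadcastable_binary_op adds broadcasting logic for binary ops, and
    // add_mem_op binds a member parse_* function for nodes that need
    // attribute handling.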
    onnx_parser()
    {
        add_generic_op("MatMul", op::dot{});
        add_generic_op("Relu", op::relu{});
        // disable dropout for inference
        add_generic_op("Dropout", op::identity{});
        add_generic_op("Identity", op::identity{});

        add_broadcastable_binary_op("Add", op::add{});
        add_broadcastable_binary_op("Div", op::div{});
        add_broadcastable_binary_op("Mul", op::mul{});
        add_broadcastable_binary_op("Sub", op::sub{});
        add_broadcastable_binary_op("Sum", op::add{});

        add_mem_op("ImageScaler", &onnx_parser::parse_imagescaler);
        add_mem_op("LeakyRelu", &onnx_parser::parse_leaky_relu);
        add_mem_op("Constant", &onnx_parser::parse_constant);
        add_mem_op("Conv", &onnx_parser::parse_conv);
        add_mem_op("MaxPool", &onnx_parser::parse_pooling);
        add_mem_op("AveragePool", &onnx_parser::parse_pooling);
        add_mem_op("GlobalMaxPool", &onnx_parser::parse_pooling);
        add_mem_op("GlobalAveragePool", &onnx_parser::parse_pooling);
        add_mem_op("Reshape", &onnx_parser::parse_reshape);
        add_mem_op("Flatten", &onnx_parser::parse_flatten);
        add_mem_op("Gemm", &onnx_parser::parse_gemm);
        add_mem_op("BatchNormalization", &onnx_parser::parse_batchnorm);
        add_mem_op("Softmax", &onnx_parser::parse_softmax);
        add_mem_op("Squeeze", &onnx_parser::parse_squeeze);
        add_mem_op("Unsqueeze", &onnx_parser::parse_unsqueeze);
        add_mem_op("Slice", &onnx_parser::parse_slice);
        add_mem_op("Concat", &onnx_parser::parse_concat);
        add_mem_op("Transpose", &onnx_parser::parse_transpose);
    }

    template <class F>
    void add_op(std::string name, F f)
    {
        ops.emplace(name, f);
    }

    template <class F>
    void add_mem_op(std::string name, F f)
    {
        ops.emplace(name, [=](auto&&... xs) {
            return std::mem_fn(f)(*this, name, std::forward<decltype(xs)>(xs)...);
        });
    }
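
    // Binary operators accept both the legacy opset-6 "broadcast"/"axis"
    // attributes and mismatched input shapes, which are expanded to a common
    // output shape with op::multibroadcast (numpy-style broadcasting).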
    template <class T>
    void add_broadcastable_binary_op(std::string name, T x)
    {
        ops.emplace(name, [this, x](attribute_map attributes, std::vector<instruction_ref> args) {
            if(args.size() != 2)
                MIGRAPH_THROW("binary operators should have 2 operands");
            if(contains(attributes, "broadcast"))
            {
                uint64_t broadcasted = parse_value(attributes.at("broadcast")).at<uint64_t>();
                if(broadcasted != 0)
                {
                    uint64_t axis = (contains(attributes, "axis"))
                                        ? parse_value(attributes.at("axis")).at<uint64_t>()
                                        : 0;
                    auto l =
                        prog.add_instruction(op::broadcast{axis, args[0]->get_shape()}, args[1]);
                    return prog.add_instruction(x, args[0], l);
                }
                return prog.add_instruction(x, args);
            }
            else if(args[0]->get_shape() != args[1]->get_shape())
            {
                // Example:
                // s0 = (3,2,4,5) and s1 = (2,1,1)
                //
                // In this case we need to broadcast (:,1,1) portion of
                // s1 plus broadcast the 1st dimension of s1
                // giving output_lens = (3,2,4,5)
                //
                // Another example:
                // s0 = (3,2,1,5) and s1 = (2,7,5)
                // In this case we need to broadcast the (:,:,1:,:) axis
                // of s0 plus the 1st dimension of s1 giving
                // output_lens = (3,2,7,5)
                //
                // Get lengths for both arguments
                const std::vector<std::size_t>* s0 = &args[0]->get_shape().lens();
                const std::vector<std::size_t>* s1 = &args[1]->get_shape().lens();

                // Make sure s0 is the smaller size
                if(s0->size() > s1->size())
                    std::swap(s0, s1);

                // Copy the larger vector to output_lens
                std::vector<std::size_t> output_lens = *s1;
                auto offset                          = s1->size() - s0->size();
                std::transform(s0->begin(),
                               s0->end(),
                               s1->begin() + offset,
                               output_lens.begin() + offset,
                               [](auto a, auto b) { return std::max(a, b); });

                auto l0 = prog.add_instruction(op::multibroadcast{output_lens}, args[0]);
                auto l1 = prog.add_instruction(op::multibroadcast{output_lens}, args[1]);
                return prog.add_instruction(x, l0, l1);
            }
            else
            {
                return prog.add_instruction(x, args);
            }
        });
    }

    template <class T>
    void add_generic_op(std::string name, T x)
    {
        ops.emplace(name, [this, x](attribute_map, std::vector<instruction_ref> args) {
            return prog.add_instruction(x, args);
        });
    }

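    // Softmax: the 2-D input is reshaped to (N, C, 1, 1) so that op::softmax
    // can be applied, and the result is reshaped back to (N, C).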
    instruction_ref
    parse_softmax(const std::string&, const attribute_map&, std::vector<instruction_ref> args)
    {
        auto dims = args.front()->get_shape().lens();
        auto r =
            prog.add_instruction(op::reshape{{long(dims[0]), long(dims[1]), 1, 1}}, args.front());
        auto s = prog.add_instruction(op::softmax{}, r);
        return prog.add_instruction(op::reshape{{long(dims[0]), long(dims[1])}}, s);
    }

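    // Conv: maps the pads/strides/dilations attributes onto op::convolution.
    // Only symmetric 2-D padding is supported; an optional third input (bias)
    // is broadcast along the channel axis and added to the convolution output.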
    instruction_ref
    parse_conv(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        op::convolution op;
        if(contains(attributes, "pads"))
        {
            if(contains(attributes, "auto_pad"))
            {
                MIGRAPH_THROW("auto_pad and padding cannot be specified simultaneously");
            }
            std::vector<std::size_t> padding;
            copy(attributes["pads"].ints(), std::back_inserter(padding));
            if(padding.size() != 4)
            {
                MIGRAPH_THROW("padding should have 4 values");
            }
            if(padding[0] != padding[2] || padding[1] != padding[3])
            {
                MIGRAPH_THROW("migraphx does not support asymmetric padding");
            }
            op.padding[0] = padding[0];
            op.padding[1] = padding[1];
        }
        if(contains(attributes, "strides"))
        {
            copy(attributes["strides"].ints(), op.stride.begin());
        }
        if(contains(attributes, "dilations"))
        {
            copy(attributes["dilations"].ints(), op.dilation.begin());
        }
        if(contains(attributes, "auto_pad"))
        {
            auto s = attributes["auto_pad"].s();
            if(contains(attributes, "pads") and to_upper(s) != "NOTSET")
            {
                MIGRAPH_THROW("auto_pad and padding cannot be specified simultaneously");
            }

            if(s.find("SAME") != std::string::npos)
            {
                op.padding_mode = op::convolution::same;
            }
        }
        if(args.size() == 3)
        {
            uint64_t axis = 1;
            auto l1       = prog.add_instruction(op, args[0], args[1]);
            auto l2       = prog.add_instruction(op::broadcast{axis, l1->get_shape()}, args[2]);
            return prog.add_instruction(op::add{}, l1, l2);
        }
        return prog.add_instruction(op, args);
    }

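    // MaxPool, AveragePool, GlobalMaxPool and GlobalAveragePool all share this
    // handler; the Global variants use the full spatial extent of the input as
    // the pooling window.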
    instruction_ref parse_pooling(const std::string& name,
                                  attribute_map attributes,
                                  std::vector<instruction_ref> args)
    {
        op::pooling op{ends_with(name, "MaxPool") ? "max" : "average"};
        if(starts_with(name, "Global"))
        {
            auto lens  = args.front()->get_shape().lens();
            op.lengths = {lens[2], lens[3]};
        }
        if(contains(attributes, "pads"))
        {
            std::vector<std::size_t> padding;
            copy(attributes["pads"].ints(), std::back_inserter(padding));
            if(padding.size() != 4)
            {
                MIGRAPH_THROW("padding should have 4 values");
            }
            if(padding[0] != padding[2] || padding[1] != padding[3])
            {
                MIGRAPH_THROW("migraphx does not support asymmetric padding");
            }
            op.padding[0] = padding[0];
            op.padding[1] = padding[1];
        }
        if(contains(attributes, "strides"))
        {
            copy(attributes["strides"].ints(), op.stride.begin());
        }
        if(contains(attributes, "kernel_shape"))
        {
            copy(attributes["kernel_shape"].ints(), op.lengths.begin());
        }
        if(contains(attributes, "auto_pad"))
        {
            auto s = attributes["auto_pad"].s();
            if(to_upper(s) != "NOTSET")
            {
                MIGRAPH_THROW("auto_pad is not supported for pooling");
            }
        }

        return prog.add_instruction(op, std::move(args));
    }

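    // Reshape: the target shape comes either from the "shape" attribute
    // (opset 1) or from a second constant input (opset 5 and later).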
    instruction_ref
    parse_reshape(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        op::reshape op;
        if(args.size() == 1)
        {
            literal s = parse_value(attributes.at("shape"));
            s.visit([&](auto v) { copy(v, std::back_inserter(op.dims)); });
        }
        if(args.size() == 2)
        {
            literal s = args[1]->get_literal();
            s.visit([&](auto v) { copy(v, std::back_inserter(op.dims)); });
        }
        return prog.add_instruction(op, args[0]);
    }

    instruction_ref
    parse_flatten(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        uint64_t axis = 0;
        if(contains(attributes, "axis"))
        {
            axis = parse_value(attributes.at("axis")).at<int>();
        }
        return prog.add_instruction(op::flatten{axis}, args[0]);
    }

    instruction_ref
    parse_squeeze(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        op::squeeze op;
        literal s = parse_value(attributes.at("axes"));
        s.visit([&](auto v) { copy(v, std::back_inserter(op.axes)); });
        return prog.add_instruction(op, args[0]);
    }

    instruction_ref
    parse_unsqueeze(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        op::unsqueeze op;
        literal s = parse_value(attributes.at("axes"));
        s.visit([&](auto v) { copy(v, std::back_inserter(op.axes)); });
        return prog.add_instruction(op, args[0]);
    }

    instruction_ref
    parse_concat(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        std::size_t axis = parse_value(attributes.at("axis")).at<int>();
        op::concat op{axis};
        return prog.add_instruction(op, std::move(args));
    }

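    // Slice: the "starts" and "ends" attributes are required; "axes" is copied
    // to the op only when present.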
    instruction_ref
    parse_slice(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        op::slice op;
        if(contains(attributes, "axes"))
        {
            literal s = parse_value(attributes.at("axes"));
            s.visit([&](auto v) { copy(v, std::back_inserter(op.axes)); });
        }
        {
            literal s = parse_value(attributes.at("ends"));
            s.visit([&](auto v) { copy(v, std::back_inserter(op.ends)); });
        }
        {
            literal s = parse_value(attributes.at("starts"));
            s.visit([&](auto v) { copy(v, std::back_inserter(op.starts)); });
        }
        return prog.add_instruction(op, args[0]);
    }

    instruction_ref parse_constant(const std::string&,
                                   attribute_map attributes,
                                   const std::vector<instruction_ref>&)
    {
        literal v = parse_value(attributes.at("value"));
        return prog.add_literal(v);
    }

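    // Gemm: optional transA/transB are realized by inserting transpose
    // instructions; a third input C, when present, is broadcast along axis 1
    // and added to the matrix product.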
    instruction_ref
    parse_gemm(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        float alpha = 1.0f;
        float beta  = 0.0f;
        bool transa = false;
        bool transb = false;
        if(contains(attributes, "alpha"))
        {
            alpha = parse_value(attributes.at("alpha")).at<float>();
        }
        if(contains(attributes, "beta"))
        {
            beta = parse_value(attributes.at("beta")).at<float>();
        }
        if(contains(attributes, "transA"))
        {
            transa = parse_value(attributes.at("transA")).at<bool>();
        }
        if(contains(attributes, "transB"))
        {
            transb = parse_value(attributes.at("transB")).at<bool>();
        }
        std::vector<int64_t> perm = {1, 0};
        auto l1 = (transa) ? prog.add_instruction(op::transpose{perm}, args[0]) : args[0];
        auto l2 = (transb) ? prog.add_instruction(op::transpose{perm}, args[1]) : args[1];
        if(args.size() == 3)
        {
            uint64_t axis = 1;
            auto l3       = prog.add_instruction(op::dot{alpha, beta}, l1, l2);
            auto l4       = prog.add_instruction(op::broadcast{axis, l3->get_shape()}, args[2]);
            return prog.add_instruction(op::add{}, l3, l4);
        }
        return prog.add_instruction(op::dot{alpha, beta}, l1, l2);
    }

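    // BatchNormalization is parsed for inference only: epsilon, momentum and
    // spatial feed op::batch_norm_inference, while is_test is read and then
    // ignored.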
    instruction_ref
    parse_batchnorm(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        float epsilon                                     = 1e-5f;
        float momentum                                    = 0.9f;
        op::batch_norm_inference::bn_infer_mode_t bn_mode = op::batch_norm_inference::spatial;
        bool is_test                                      = false;
        if(contains(attributes, "epsilon"))
        {
            epsilon = parse_value(attributes.at("epsilon")).at<float>();
        }
        if(contains(attributes, "momentum"))
        {
            momentum = parse_value(attributes.at("momentum")).at<float>();
        }
        if(contains(attributes, "is_test"))
        {
            is_test = parse_value(attributes.at("is_test")).at<uint64_t>() > 0;
        }
        if(contains(attributes, "spatial"))
        {
            bn_mode = (parse_value(attributes.at("spatial")).at<uint64_t>() > 0)
                          ? op::batch_norm_inference::spatial
                          : op::batch_norm_inference::per_activation;
        }
        (void)is_test;
        op::batch_norm_inference op{epsilon, momentum, bn_mode};
        return prog.add_instruction(op, std::move(args));
    }

    instruction_ref parse_leaky_relu(const std::string&,
                                     attribute_map attributes,
                                     std::vector<instruction_ref> args)
    {
        float alpha = 0.01; // default alpha val for leaky relu
        if(contains(attributes, "alpha"))
        {
            alpha = parse_value(attributes.at("alpha")).at<float>();
        }
        op::leaky_relu op{alpha};
        return prog.add_instruction(op, args.front());
    }

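    // ImageScaler: multiplies the input by a scalar scale and adds a
    // per-channel bias broadcast along axis 1.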
    instruction_ref parse_imagescaler(const std::string&,
                                      attribute_map attributes,
                                      std::vector<instruction_ref> args)
    {
        float scale = 1.0;
        std::vector<float> bias{};
        if(contains(attributes, "scale"))
        {
            scale = parse_value(attributes.at("scale")).at<float>();
        }

        if(contains(attributes, "bias"))
        {
            auto&& bias_floats = attributes["bias"].floats();
            bias               = std::vector<float>(bias_floats.begin(), bias_floats.end());
        }
        auto input_shape = args.front()->get_shape();

        auto scale_val = prog.add_literal(scale);
        auto bias_vals = prog.add_literal(
            migraphx::literal{migraphx::shape{migraphx::shape::float_type, {bias.size()}}, bias});

        auto scale_tensor = prog.add_instruction(migraphx::op::scalar{input_shape}, scale_val);
        auto img_scaled   = prog.add_instruction(migraphx::op::mul{}, args.front(), scale_tensor);
        auto bias_bcast = prog.add_instruction(migraphx::op::broadcast{1, input_shape}, bias_vals);
        return prog.add_instruction(migraphx::op::add{}, img_scaled, bias_bcast);
    }

    instruction_ref
    parse_transpose(const std::string&, attribute_map attributes, std::vector<instruction_ref> args)
    {
        std::vector<int64_t> perm{};
        if(contains(attributes, "perm"))
        {
            auto&& perm_vals = attributes["perm"].ints();
            perm             = std::vector<int64_t>(perm_vals.begin(), perm_vals.end());
        }
        return prog.add_instruction(migraphx::op::transpose{perm}, args.front());
    }

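    // parse_from reads the ModelProto, parse_graph turns initializers into
    // literals and the remaining graph inputs into parameters, and parse_node
    // recursively parses each node once its operands are available.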
    void parse_from(std::istream& is)
    {
        onnx::ModelProto model;
        if(model.ParseFromIstream(&is))
        {
            auto str_toupper = [](std::string s) {
                std::transform(
                    s.begin(), s.end(), s.begin(), [](unsigned char c) { return std::toupper(c); });
                return s;
            };
            auto producer_name = str_toupper(model.producer_name());
            std::cout << producer_name << std::endl;

            if(model.has_graph())
            {
                this->parse_graph(model.graph());
            }
        }
        else
        {
            throw std::runtime_error("Failed reading");
        }
    }

    void parse_graph(const onnx::GraphProto& graph)
    {
        nodes = get_nodes(graph);
        std::unordered_map<std::string, onnx::TensorProto> initializer_data;
        for(auto&& f : graph.initializer())
        {
            initializer_data[f.name()] = f;
        }
        for(auto&& input : graph.input())
        {
            const std::string& name = input.name();
            // Does the input have an initializer?
            if(contains(initializer_data, name))
            {
                auto t             = initializer_data[name];
                instructions[name] = prog.add_literal(parse_tensor(t));
            }
            else
            {
                // TODO: Get shape of input parameter
                shape s            = parse_type(input.type());
                instructions[name] = prog.add_parameter(name, s);
            }
        }
        for(auto&& p : nodes)
        {
            this->parse_node(get_name(p.second));
        }
    }

    void parse_node(const std::string& name)
    {
        if(name.empty())
            MIGRAPH_THROW("Onnx node must have a name");
        if(instructions.count(name) == 0)
        {
            auto&& node = nodes.at(name);
            std::vector<instruction_ref> args;
            for(auto&& input : node.input())
            {
                if(nodes.count(input) > 0)
                {
                    auto&& iname = get_name(nodes.at(input));
                    assert(name != iname);
                    this->parse_node(iname);
                    args.push_back(instructions.at(iname));
                }
                else
                {
                    args.push_back(instructions.at(input));
                }
            }
            if(ops.count(node.op_type()) == 0)
            {
                instructions[name] = prog.add_instruction(unknown{node.op_type()}, args);
            }
            else
            {
                instructions[name] = ops[node.op_type()](get_attributes(node), args);
            }
        }
    }

    static attribute_map get_attributes(const onnx::NodeProto& node)
    {
        std::unordered_map<std::string, onnx::AttributeProto> result;
        for(auto&& attr : node.attribute())
        {
            result[attr.name()] = attr;
        }
        return result;
    }

    static std::string get_name(const onnx::NodeProto& node)
    {
        if(node.name().empty())
        {
            std::string generated = "migraphx_unnamed_node";
            return std::accumulate(node.output().begin(),
                                   node.output().end(),
                                   generated,
                                   [](auto x, auto y) { return x + "_" + y; });
        }
        return node.name();
    }

    static node_map get_nodes(const onnx::GraphProto& graph)
    {
        std::unordered_map<std::string, onnx::NodeProto> result;
        for(auto&& node : graph.node())
        {
            result[get_name(node)] = node;
            for(auto&& output : node.output())
            {
                result[output] = node;
            }
        }
        return result;
    }

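    // Attribute and tensor payloads are converted to migraphx literals below.
    // Integer types without a direct migraphx equivalent (int8, int16, uint16,
    // bool) are widened to int32; unsupported types throw.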
    template <class T>
    static literal from_repeated(shape::type_t t, const T& r)
    {
        std::size_t size = r.size();
        return literal{{t, {size}}, r.begin(), r.end()};
    }

    static literal parse_value(const onnx::AttributeProto& attr)
    {
        switch(attr.type())
        {
        case onnx::AttributeProto::UNDEFINED: return {};
        case onnx::AttributeProto::FLOAT: return literal{attr.f()};
        case onnx::AttributeProto::INT: return literal{attr.i()};
        case onnx::AttributeProto::STRING: return {};
        case onnx::AttributeProto::TENSOR: return parse_tensor(attr.t());
        case onnx::AttributeProto::GRAPH: return {};
        case onnx::AttributeProto::FLOATS: return from_repeated(shape::float_type, attr.floats());
        case onnx::AttributeProto::INTS: return from_repeated(shape::int64_type, attr.ints());
        case onnx::AttributeProto::STRINGS: return {};
        case onnx::AttributeProto::TENSORS: return {};
        case onnx::AttributeProto::GRAPHS: return {};
        }
        MIGRAPH_THROW("Invalid attribute type");
    }

    static literal parse_tensor(const onnx::TensorProto& t)
    {
        std::vector<std::size_t> dims(t.dims().begin(), t.dims().end());
        if(t.has_raw_data())
        {
            const std::string& s = t.raw_data();
            switch(t.data_type())
            {
            case onnx::TensorProto::UNDEFINED: throw std::runtime_error("");
            case onnx::TensorProto::FLOAT: return literal{{shape::float_type, dims}, s.data()};
            case onnx::TensorProto::UINT8: throw std::runtime_error("");
            case onnx::TensorProto::INT8: return literal{{shape::int32_type, dims}, s.data()};
            case onnx::TensorProto::UINT16: return literal{{shape::int32_type, dims}, s.data()};
            case onnx::TensorProto::INT16: return literal{{shape::int32_type, dims}, s.data()};
            case onnx::TensorProto::INT32: return literal{{shape::int32_type, dims}, s.data()};
            case onnx::TensorProto::INT64: return literal{{shape::int64_type, dims}, s.data()};
            case onnx::TensorProto::STRING: throw std::runtime_error("");
            case onnx::TensorProto::BOOL: return literal{{shape::int32_type, dims}, s.data()};
            case onnx::TensorProto::FLOAT16: return literal{{shape::half_type, dims}, s.data()};
            case onnx::TensorProto::DOUBLE: return literal{{shape::double_type, dims}, s.data()};
            case onnx::TensorProto::UINT32: throw std::runtime_error("");
            case onnx::TensorProto::UINT64: throw std::runtime_error("");
            case onnx::TensorProto::COMPLEX64: throw std::runtime_error("");
            case onnx::TensorProto::COMPLEX128: throw std::runtime_error("");
            }
            MIGRAPH_THROW("Invalid tensor type");
        }
        switch(t.data_type())
        {
        case onnx::TensorProto::UNDEFINED: throw std::runtime_error("");
        case onnx::TensorProto::FLOAT:
            return literal{{shape::float_type, dims}, t.float_data().begin(), t.float_data().end()};
        case onnx::TensorProto::UINT8: throw std::runtime_error("");
        case onnx::TensorProto::INT8:
            return literal{{shape::int32_type, dims}, t.int32_data().begin(), t.int32_data().end()};
        case onnx::TensorProto::UINT16:
            return literal{{shape::int32_type, dims}, t.int32_data().begin(), t.int32_data().end()};
        case onnx::TensorProto::INT16:
            return literal{{shape::int32_type, dims}, t.int32_data().begin(), t.int32_data().end()};
        case onnx::TensorProto::INT32:
            return literal{{shape::int32_type, dims}, t.int32_data().begin(), t.int32_data().end()};
        case onnx::TensorProto::INT64:
            return literal{{shape::int64_type, dims}, t.int64_data().begin(), t.int64_data().end()};
        case onnx::TensorProto::STRING: throw std::runtime_error("");
        case onnx::TensorProto::BOOL:
            return literal{{shape::int32_type, dims}, t.int32_data().begin(), t.int32_data().end()};
        case onnx::TensorProto::FLOAT16:
            return literal{{shape::half_type, dims}, t.float_data().begin(), t.float_data().end()};
        case onnx::TensorProto::DOUBLE:
            return literal{
                {shape::double_type, dims}, t.double_data().begin(), t.double_data().end()};
        case onnx::TensorProto::UINT32: throw std::runtime_error("");
        case onnx::TensorProto::UINT64: throw std::runtime_error("");
        case onnx::TensorProto::COMPLEX64: throw std::runtime_error("");
        case onnx::TensorProto::COMPLEX128: throw std::runtime_error("");
        }
        MIGRAPH_THROW("Invalid tensor type");
    }

    static shape parse_type(const onnx::TypeProto& t)
    {
        shape::type_t shape_type{};
        switch(t.tensor_type().elem_type())
        {
        case onnx::TensorProto::UNDEFINED:
            break; // throw std::runtime_error("Unsupported type UNDEFINED");
        case onnx::TensorProto::FLOAT: shape_type = shape::float_type; break;
        case onnx::TensorProto::UINT8:
            break; // throw std::runtime_error("Unsupported type UINT8");
        case onnx::TensorProto::INT8: shape_type = shape::int8_type; break;
        case onnx::TensorProto::UINT16: shape_type = shape::uint16_type; break;
        case onnx::TensorProto::INT16: shape_type = shape::int16_type; break;
        case onnx::TensorProto::INT32: shape_type = shape::int32_type; break;
        case onnx::TensorProto::INT64: shape_type = shape::int64_type; break;
        case onnx::TensorProto::STRING:
            break; // throw std::runtime_error("Unsupported type STRING");
        case onnx::TensorProto::BOOL:
            break; // throw std::runtime_error("Unsupported type BOOL");
        case onnx::TensorProto::FLOAT16: shape_type = shape::half_type; break;
        case onnx::TensorProto::DOUBLE: shape_type = shape::double_type; break;
        case onnx::TensorProto::UINT32: shape_type = shape::uint32_type; break;
        case onnx::TensorProto::UINT64: shape_type = shape::uint64_type; break;
        case onnx::TensorProto::COMPLEX64:
            break; // throw std::runtime_error("Unsupported type COMPLEX64");
        case onnx::TensorProto::COMPLEX128:
            break; // throw std::runtime_error("Unsupported type COMPLEX128");
        }
        std::vector<std::size_t> dims;
        auto&& tensor_dims = t.tensor_type().shape().dim();
        std::transform(tensor_dims.begin(),
                       tensor_dims.end(),
                       std::back_inserter(dims),
                       [](auto&& d) -> std::size_t {
                           if(not d.has_dim_value())
                           {
                               long default_batch_size = 1; // FIXME
                               return default_batch_size;
                           }
                           return d.dim_value();
                       });
        return {shape_type, dims};
    }
};

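// Entry point: parse an ONNX protobuf file into a migraphx::program.
// Example usage (with a hypothetical file name):
//   migraphx::program p = migraphx::parse_onnx("model.onnx");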
program parse_onnx(const std::string& name)
{
    std::fstream input(name.c_str(), std::ios::in | std::ios::binary);
    onnx_parser parser;
#ifndef NDEBUG
    // Log the program when it can't be parsed
    try
    {
        parser.parse_from(input);
    }
    catch(...)
    {
        std::cerr << parser.prog << std::endl;
        throw;
    }
#else
    parser.parse_from(input);
#endif
    return std::move(parser.prog);
}

} // namespace MIGRAPH_INLINE_NS
} // namespace migraphx