#include <dlib/xml_parser.h>
#include <dlib/matrix.h>
#include <fstream>
#include <vector>
#include <stack>
#include <set>
#include <dlib/string.h>

using namespace std;
using namespace dlib;


// ----------------------------------------------------------------------------------------

// Only these computational layers have parameters
const std::set<string> comp_tags_with_params = {"fc", "fc_no_bias", "con", "affine_con", "affine_fc", "affine", "prelu"};

struct layer
{
    string type; // comp, loss, or input
    int idx;

    string detail_name; // The name of the tag inside the layer tag. e.g. fc, con, max_pool, input_rgb_image.
    std::map<string,double> attributes;
    matrix<double> params;
    long tag_id = -1;   // If this isn't -1 then it means this layer was tagged, e.g. wrapped with tag2<> giving tag_id==2
    long skip_id = -1;  // If this isn't -1 then it means this layer draws its inputs from
                        // the most recent layer with tag_id==skip_id rather than its immediate predecessor. 

    double attribute (const string& key) const
    {
        auto i = attributes.find(key);
        if (i != attributes.end())
            return i->second;
        else
            throw dlib::error("Layer doesn't have the requested attribute '" + key + "'.");
    }

    string caffe_layer_name() const 
    { 
        if (type == "input")
            return "data";
        else
            return detail_name+to_string(idx);
    }
};
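
// Illustrative note (hypothetical network, not part of the converter): in a dlib
// net definition, wrapping a layer in tag1<> is what gives it tag_id==1, and a
// skip1<> wrapper is what sets skip_id==1 on the layer it feeds, telling that
// layer to read from the most recent tag1 layer instead of its immediate
// predecessor.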

// ----------------------------------------------------------------------------------------

std::vector<layer> parse_dlib_xml(
    const string& xml_filename
);

// ----------------------------------------------------------------------------------------

template <typename iterator>
string find_layer_caffe_name (
    iterator i,
    long tag_id
)
/*!
    requires
        - i is an iterator pointing to a layer in the list of layers produced by parse_dlib_xml().
        - i is not an input layer.
    ensures
        - if (tag_id == -1) then
            - returns the caffe string name for the previous layer to layer i.
        - else
            - returns the caffe string name for the previous layer to layer i with the given tag_id.
!*/
{
    if (tag_id == -1)
    {
        return (i-1)->caffe_layer_name();
    }
    else
    {
        while(true)
        {
            i--;
            if (i->tag_id == tag_id)
                return i->caffe_layer_name();

            // if we hit the end of the network before we found what we were looking for
            if (i->type == "input")
                throw dlib::error("Network definition is bad, a layer wanted to skip back to a non-existing layer.");
        }
    }
}

template <typename iterator>
string find_input_layer_caffe_name (iterator i) { return find_layer_caffe_name(i, i->skip_id); }

// ----------------------------------------------------------------------------------------

template <typename EXP>
void print_as_np_array(std::ostream& out, const matrix_exp<EXP>& m)
{
    out << "np.array([";
    for (auto x : m)
        out << x << ",";
    out << "], dtype='float32')";
}
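
// For example, a matrix holding the values 1, 2, 3 would print as:
//   np.array([1,2,3,], dtype='float32')
// The trailing comma before the closing bracket is legal Python, so the emitted
// file still parses.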

// ----------------------------------------------------------------------------------------

void convert_dlib_xml_to_caffe_python_code(
    const string& xml_filename
)
{
    const string out_filename = left_substr(xml_filename,".") + "_dlib_to_caffe_model.py";
    cout << "Writing model to " << out_filename << endl;
    ofstream fout(out_filename);
    fout.precision(9);
    const auto layers = parse_dlib_xml(xml_filename);

    fout << "import caffe " << endl;
    fout << "from caffe import layers as L, params as P" << endl;
    fout << "import numpy as np" << endl;

    // dlib nets don't commit to a batch size, so just use 1 as the default
    fout << "\n# Input tensor dimensions" << endl;
    fout << "batch_size = 1;" << endl;
    if (layers.back().detail_name == "input_rgb_image")
    {
        fout << "input_nr = 28; #WARNING, the source dlib network didn't commit to a specific input size, so we put 28 here as a default." << endl;
        fout << "input_nc = 28; #WARNING, the source dlib network didn't commit to a specific input size, so we put 28 here as a default." << endl;
        fout << "input_k = 3;" << endl;
    }
    else if (layers.back().detail_name == "input_rgb_image_sized")
    {
        fout << "input_nr = " << layers.back().attribute("nr") << ";" << endl;
        fout << "input_nc = " << layers.back().attribute("nc") << ";" << endl;
        fout << "input_k = 3;" << endl;
    }
    else if (layers.back().detail_name == "input")
    {
        fout << "input_nr = 28; #WARNING, the source dlib network didn't commit to a specific input size, so we put 28 here as a default." << endl;
        fout << "input_nc = 28; #WARNING, the source dlib network didn't commit to a specific input size, so we put 28 here as a default." << endl;
        fout << "input_k = 1;" << endl;
    }
    else
    {
        throw dlib::error("No known transformation from dlib's " + layers.back().detail_name + " layer to caffe.");
    }
    fout << endl;

    fout << "def make_netspec():" << endl;
    fout << "    # For reference, the only \"documentation\" about caffe layer parameters seems to be this page:\n";
    fout << "    # https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto\n" << endl;
    fout << "    n = caffe.NetSpec(); " << endl;
    fout << "    n.data,n.label = L.MemoryData(batch_size=batch_size, channels=input_k, height=input_nr, width=input_nc, ntop=2)" << endl;
    // iterate the layers starting with the input layer
    for (auto i = layers.rbegin(); i != layers.rend(); ++i)
    {
        // skip input and loss layers
        if (i->type == "loss" || i->type == "input")
            continue;


        if (i->detail_name == "con")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Convolution(n." << find_input_layer_caffe_name(i);
            fout << ", num_output=" << i->attribute("num_filters");
            fout << ", kernel_w=" << i->attribute("nc");
            fout << ", kernel_h=" << i->attribute("nr");
            fout << ", stride_w=" << i->attribute("stride_x");
            fout << ", stride_h=" << i->attribute("stride_y");
            fout << ", pad_w=" << i->attribute("padding_x");
            fout << ", pad_h=" << i->attribute("padding_y");
            fout << ");\n";
        }
        else if (i->detail_name == "relu")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.ReLU(n." << find_input_layer_caffe_name(i);
            fout << ");\n";
        }
        else if (i->detail_name == "sig")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Sigmoid(n." << find_input_layer_caffe_name(i);
            fout << ");\n";
        }
        else if (i->detail_name == "prelu")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.PReLU(n." << find_input_layer_caffe_name(i);
            fout << ", channel_shared=True"; 
            fout << ");\n";
        }
        else if (i->detail_name == "max_pool")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Pooling(n." << find_input_layer_caffe_name(i);
            fout << ", pool=P.Pooling.MAX"; 
            if (i->attribute("nc")==0)
            {
                fout << ", global_pooling=True";
            }
            else
            {
                fout << ", kernel_w=" << i->attribute("nc");
                fout << ", kernel_h=" << i->attribute("nr");
            }
            if (i->attribute("padding_x") != 0 || i->attribute("padding_y") != 0)
            {
                throw dlib::error("dlib and caffe implement pooling with non-zero padding differently, so you can't convert a "
                    "network with such pooling layers.");
            }

            fout << ", stride_w=" << i->attribute("stride_x");
            fout << ", stride_h=" << i->attribute("stride_y");
            fout << ", pad_w=" << i->attribute("padding_x");
            fout << ", pad_h=" << i->attribute("padding_y");
            fout << ");\n";
        }
        else if (i->detail_name == "avg_pool")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Pooling(n." << find_input_layer_caffe_name(i);
            fout << ", pool=P.Pooling.AVE"; 
            if (i->attribute("nc")==0)
            {
                fout << ", global_pooling=True";
            }
            else
            {
                fout << ", kernel_w=" << i->attribute("nc");
                fout << ", kernel_h=" << i->attribute("nr");
            }
            if (i->attribute("padding_x") != 0 || i->attribute("padding_y") != 0)
            {
                throw dlib::error("dlib and caffe implement pooling with non-zero padding differently, so you can't convert a "
                    "network with such pooling layers.");
            }

            fout << ", stride_w=" << i->attribute("stride_x");
            fout << ", stride_h=" << i->attribute("stride_y");
            fout << ", pad_w=" << i->attribute("padding_x");
            fout << ", pad_h=" << i->attribute("padding_y");
            fout << ");\n";
        }
        else if (i->detail_name == "fc")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.InnerProduct(n." << find_input_layer_caffe_name(i);
            fout << ", num_output=" << i->attribute("num_outputs");
            fout << ", bias_term=True";
            fout << ");\n";
        }
        else if (i->detail_name == "fc_no_bias")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.InnerProduct(n." << find_input_layer_caffe_name(i);
            fout << ", num_output=" << i->attribute("num_outputs");
            fout << ", bias_term=False";
            fout << ");\n";
        }
        else if (i->detail_name == "bn_con" || i->detail_name == "bn_fc")
        {
            throw dlib::error("Conversion from dlib's batch norm layers to caffe's isn't supported.  Instead, "
                "you should put your dlib network into 'test mode' by switching batch norm layers to affine layers. "
                "Then you can convert that 'test mode' network to caffe.");
        }
        else if (i->detail_name == "affine_con")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Scale(n." << find_input_layer_caffe_name(i);
            fout << ", bias_term=True";
            fout << ");\n";
        }
        else if (i->detail_name == "affine_fc")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Scale(n." << find_input_layer_caffe_name(i);
            fout << ", bias_term=True";
            fout << ");\n";
        }
        else if (i->detail_name == "add_prev")
        {
            fout << "    n." << i->caffe_layer_name() << " = L.Eltwise(n." << find_input_layer_caffe_name(i);
            fout << ", n." << find_layer_caffe_name(i, i->attribute("tag"));
            fout << ", operation=P.Eltwise.SUM";
            fout << ");\n";
        }
        else
        {
            throw dlib::error("No known transformation from dlib's " + i->detail_name + " layer to caffe.");
        }
    }
    fout << "    return n.to_proto();\n\n" << endl;


    // -------------------------
    // -------------------------


    fout << "def save_as_caffe_model(def_file, weights_file):\n";
    fout << "    with open(def_file, 'w') as f: f.write(str(make_netspec()));\n";
    fout << "    net = caffe.Net(def_file, caffe.TEST);\n";
    fout << "    set_network_weights(net);\n";
    fout << "    net.save(weights_file);\n\n";


    // -----------------------------------------------------------------------------------
    //  The next block of code outputs python code that populates all the filter weights.
    // -----------------------------------------------------------------------------------

    fout << "def set_network_weights(net):\n";
    fout << "    # populate network parameters\n";
    // iterate the layers starting with the input layer
    for (auto i = layers.rbegin(); i != layers.rend(); ++i)
    {
        // skip input and loss layers
        if (i->type == "loss" || i->type == "input")
            continue;


        if (i->detail_name == "con")
        {
            const long num_filters = i->attribute("num_filters");
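            // i->params holds all the filter weights first, followed by
            // num_filters bias values, so split those two blocks apart here.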
            matrix<double> weights = trans(rowm(i->params,range(0,i->params.size()-num_filters-1)));
            matrix<double> biases  = trans(rowm(i->params,range(i->params.size()-num_filters, i->params.size()-1)));

            // main filter weights
            fout << "    p = "; print_as_np_array(fout,weights); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][0].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][0].data[:] = p;\n";

            // biases
            fout << "    p = "; print_as_np_array(fout,biases); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][1].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][1].data[:] = p;\n";
        }
        else if (i->detail_name == "fc")
        {
            matrix<double> weights = trans(rowm(i->params, range(0,i->params.nr()-2))); 
            matrix<double> biases  = rowm(i->params, i->params.nr()-1); 

            // main filter weights
            fout << "    p = "; print_as_np_array(fout,weights); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][0].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][0].data[:] = p;\n";

            // biases
            fout << "    p = "; print_as_np_array(fout,biases); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][1].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][1].data[:] = p;\n";
        }
        else if (i->detail_name == "fc_no_bias")
        {
            matrix<double> weights = trans(i->params); 

            // main filter weights
            fout << "    p = "; print_as_np_array(fout,weights); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][0].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][0].data[:] = p;\n";
        }
        else if (i->detail_name == "affine_con" || i->detail_name == "affine_fc")
        {
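            // An affine layer's params vector is the gamma (scale) values followed
            // by the beta (bias) values, each block dims elements long.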
            const long dims = i->params.size()/2;
            matrix<double> gamma = trans(rowm(i->params,range(0,dims-1)));
            matrix<double> beta  = trans(rowm(i->params,range(dims, 2*dims-1)));

            // set gamma weights
            fout << "    p = "; print_as_np_array(fout,gamma); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][0].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][0].data[:] = p;\n";

            // set beta weights 
            fout << "    p = "; print_as_np_array(fout,beta); fout << ";\n";
            fout << "    p.shape = net.params['"<<i->caffe_layer_name()<<"'][1].data.shape;\n";
            fout << "    net.params['"<<i->caffe_layer_name()<<"'][1].data[:] = p;\n";
        }
        else if (i->detail_name == "prelu")
        {
            const double param = i->params(0);

            // main filter weights
            fout << "    tmp = net.params['"<<i->caffe_layer_name()<<"'][0].data.view();\n";
            fout << "    tmp.shape = 1;\n";
            fout << "    tmp[0] = "<<param<<";\n";
        }
    }

}

// ----------------------------------------------------------------------------------------

int main(int argc, char** argv) try
{
    if (argc == 1)
    {
        cout << "Give this program an xml file generated by dlib::net_to_xml() and it will" << endl;
        cout << "convert it into a python file that outputs a caffe model containing the dlib model." << endl;
        return 0;
    }

    for (int i = 1; i < argc; ++i)
        convert_dlib_xml_to_caffe_python_code(argv[i]);

    return 0;
}
catch(std::exception& e)
{
    cout << "\n\n*************** ERROR CONVERTING TO CAFFE ***************\n" << e.what() << endl;
    return 1;
}

// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
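
// The doc_handler below consumes the XML written by dlib::net_to_xml().  As a
// rough sketch of the expected shape (illustrative only, tag names and attribute
// values vary by network):
//
//   <net>
//     <layer idx='1' type='loss'> <loss_multiclass_log/> </layer>
//     <layer idx='2' type='comp'>
//       <fc num_outputs='10'> ...parameter matrix as text... </fc>
//     </layer>
//     <layer type='skip' id='1'/>
//     <layer type='tag' id='1'/>
//     <layer idx='3' type='input'> <input/> </layer>
//   </net>
//
// Layers appear output-first, which is why parse_dlib_xml() checks that the last
// layer parsed is the input layer.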

class doc_handler : public document_handler
{
public:
    std::vector<layer> layers;
    bool seen_first_tag = false;

    layer next_layer;
    std::stack<string> current_tag;
    long tag_id = -1;


    virtual void start_document (
    ) 
    { 
        layers.clear(); 
        seen_first_tag = false;
        tag_id = -1;
    }

    virtual void end_document (
    ) { }

    virtual void start_element ( 
        const unsigned long line_number,
        const std::string& name,
        const dlib::attribute_list& atts
    )
    {
        if (!seen_first_tag)
        {
            if (name != "net")
                throw dlib::error("The top level XML tag must be a 'net' tag.");
            seen_first_tag = true;
        }

        if (name == "layer")
        {
            next_layer = layer();
            if (atts["type"] == "skip")
            {
                // Don't make a new layer, just record the skip id on the previous layer.
                if (layers.size() == 0)
                    throw dlib::error("A skip layer was found as the first layer, but the first layer should be an input layer.");
                layers.back().skip_id = sa = atts["id"];
                
                // We intentionally leave next_layer empty so the end_element() callback
                // doesn't add it as another layer when called.
            }
            else if (atts["type"] == "tag")
            {
                // Don't make a new layer, just remember the tag id so we can apply it on
                // the next layer.
                tag_id = sa = atts["id"];
                
                // We intentionally leave next_layer empty so the end_element() callback
                // doesn't add it as another layer when called.
            }
            else
            {
                next_layer.idx = sa = atts["idx"];
                next_layer.type = atts["type"];
                if (tag_id != -1)
                {
                    next_layer.tag_id = tag_id;
                    tag_id = -1;
                }
            }
        }
        else if (current_tag.size() != 0 && current_tag.top() == "layer")
        {
            next_layer.detail_name = name;
            // copy all the XML tag's attributes into the layer struct
            atts.reset();
            while (atts.move_next())
                next_layer.attributes[atts.element().key()] = sa = atts.element().value();
        }

        current_tag.push(name);
    }

    virtual void end_element ( 
        const unsigned long line_number,
        const std::string& name
    )
    {
        current_tag.pop();
        if (name == "layer" && next_layer.type.size() != 0)
            layers.push_back(next_layer);
    }

    virtual void characters ( 
        const std::string& data
    )
    {
        if (current_tag.size() == 0)
            return;

        if (comp_tags_with_params.count(current_tag.top()) != 0)
        {
            istringstream sin(data);
            sin >> next_layer.params;
        }

    }

    virtual void processing_instruction (
        const unsigned long line_number,
        const std::string& target,
        const std::string& data
    )
    {
    }
};

// ----------------------------------------------------------------------------------------

std::vector<layer> parse_dlib_xml(
    const string& xml_filename
)
{
    doc_handler dh;
    parse_xml(xml_filename, dh);
    if (dh.layers.size() == 0)
        throw dlib::error("No layers found in XML file!");

    if (dh.layers.back().type != "input")
        throw dlib::error("The network in the XML file is missing an input layer!");

    return dh.layers;
}

// ----------------------------------------------------------------------------------------