# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))

import yaml
import paddle
import paddle.distributed as dist

paddle.seed(2)  # fix the global random seed so training runs are repeatable

from ppocr.data import build_dataloader
from ppocr.modeling.architectures import build_model
from ppocr.losses import build_loss
from ppocr.optimizer import build_optimizer
from ppocr.postprocess import build_post_process
from ppocr.metrics import build_metric
from ppocr.utils.save_load import init_model
import tools.program as program

dist.get_world_size()


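# Rough sketch of the YAML sections this entry point expects (illustrative
# only; the real configs under PaddleOCR's configs/ directory carry many more
# fields):
#
#   Global:        distributed, epoch_num and other shared settings
#   Train / Eval:  dataset and loader settings consumed by build_dataloader
#   Architecture:  model definition; its Head receives out_channels for rec models
#   Loss / Optimizer / PostProcess / Metric: consumed by the matching build_* calls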
def main(config, device, logger, vdl_writer):
    # init dist environment
    if config['Global']['distributed']:
        dist.init_parallel_env()

    global_config = config['Global']

    # build dataloader
    train_dataloader = build_dataloader(config, 'Train', device, logger)
    if config['Eval']:
        valid_dataloader = build_dataloader(config, 'Eval', device, logger)
    else:
        valid_dataloader = None

    # build post process
    post_process_class = build_post_process(config['PostProcess'],
                                            global_config)

    # build model
    # for rec algorithm
    if hasattr(post_process_class, 'character'):
        char_num = len(getattr(post_process_class, 'character'))
        config['Architecture']["Head"]['out_channels'] = char_num
    model = build_model(config['Architecture'])
    if config['Global']['distributed']:
        model = paddle.DataParallel(model)

    # build loss
    loss_class = build_loss(config['Loss'])

    # build optim
    optimizer, lr_scheduler = build_optimizer(
        config['Optimizer'],
        epochs=config['Global']['epoch_num'],
        step_each_epoch=len(train_dataloader),
        parameters=model.parameters())

    # build metric
    eval_class = build_metric(config['Metric'])
    # load pretrain model
    pre_best_model_dict = init_model(config, model, logger, optimizer)

    logger.info('train dataloader has {} iters'.format(len(train_dataloader)))
    # valid_dataloader is None when the config has no Eval section
    if valid_dataloader is not None:
        logger.info('valid dataloader has {} iters'.format(
            len(valid_dataloader)))
    # start train
    program.train(config, train_dataloader, valid_dataloader, device, model,
                  loss_class, optimizer, lr_scheduler, post_process_class,
                  eval_class, pre_best_model_dict, logger, vdl_writer)


def test_reader(config, device, logger):
    loader = build_dataloader(config, 'Train', device, logger)
    import time
    starttime = time.time()
    count = 0
    try:
        # paddle.io.DataLoader instances are iterated directly, not called
        for data in loader:
            count += 1
            if count % 1 == 0:
                batch_time = time.time() - starttime
                starttime = time.time()
                logger.info("reader: {}, {}, {}".format(count,
                                                        len(data), batch_time))
    except Exception as e:
        logger.info(e)
    logger.info("finish reader: {}, Success!".format(count))


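# Example launch commands (illustrative; the config path is only a placeholder
# for any PaddleOCR yml config, and launch flags can differ across Paddle
# versions):
#   single GPU:
#       python3 tools/train.py -c configs/det/det_mv3_db.yml
#   multiple GPUs:
#       python3 -m paddle.distributed.launch --gpus '0,1,2,3' \
#           tools/train.py -c configs/det/det_mv3_db.yml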
if __name__ == '__main__':
    config, device, logger, vdl_writer = program.preprocess()
    main(config, device, logger, vdl_writer)
#     test_reader(config, device, logger)