import json
import math
from pathlib import Path
import re
import sys

import torch
from nni.retiarii import json_dumps, json_loads, serialize
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import MNIST

sys.path.insert(0, Path(__file__).parent.as_posix())

from imported.model import ImportTest


class Foo:
    """Serialization fixture: a plain class whose instances carry a large
    list attribute, so dumping one *by value* produces a long JSON string."""

    def __init__(self, a, b=1):
        self.aa = a
        # 1000 copies of an int: deliberately bulky payload for size checks.
        self.bb = [b + 1] * 1000

    def __eq__(self, other):
        # Equality is attribute-wise; used by the round-trip assertions below.
        return (self.aa, self.bb) == (other.aa, other.bb)


def test_serialize():
    """Round-trip a plain class through serialize()/json_dumps()/json_loads().

    Both positional and keyword arguments must survive the dump/load cycle.
    """
    module = serialize(Foo, 3)
    assert json_loads(json_dumps(module)) == module
    module = serialize(Foo, b=2, a=1)
    assert json_loads(json_dumps(module)) == module

    # A raw Foo instance passed as an argument has no symbolic form, so it is
    # presumably serialized by value — its 1000-element list makes the dump long.
    module = serialize(Foo, Foo(1), 5)
    dumped_module = json_dumps(module)
    assert len(dumped_module) > 200  # serialized by value: dump is long

    # Wrapping the argument in serialize() keeps a compact symbolic record
    # instead of the full instance, so the dump stays short and round-trips.
    module = serialize(Foo, serialize(Foo, 1), 5)
    dumped_module = json_dumps(module)
    assert len(dumped_module) < 200  # symbolic reference: dump stays compact
    assert json_loads(dumped_module) == module


def test_basic_unit():
    """A serializable module imported from another file must round-trip
    through json_dumps()/json_loads() and compare equal to the original."""
    module = ImportTest(3, 0.5)
    assert json_loads(json_dumps(module)) == module


def test_dataset():
    """Serialize torch datasets and dataloaders, with and without transforms.

    Checks the exact JSON layout (``__type__`` + ``arguments``) for a nested
    DataLoader/MNIST pair, then verifies that a deserialized loader actually
    yields batches of the expected shape.
    """
    dataset = serialize(MNIST, root='data/mnist', train=False, download=True)
    dataloader = serialize(DataLoader, dataset, batch_size=10)

    # Expected wire format: nested {"__type__": ..., "arguments": ...} records.
    dumped_ans = {
        "__type__": "torch.utils.data.dataloader.DataLoader",
        "arguments": {
            "batch_size": 10,
            "dataset": {
                "__type__": "torchvision.datasets.mnist.MNIST",
                "arguments": {"root": "data/mnist", "train": False, "download": True}
            }
        }
    }
    assert json_dumps(dataloader) == json_dumps(dumped_ans)
    dataloader = json_loads(json_dumps(dumped_ans))
    assert isinstance(dataloader, DataLoader)

    # Transforms wrapped in serialize(): the whole pipeline is serializable
    # and a restored loader must still produce correctly shaped MNIST batches.
    dataset = serialize(MNIST, root='data/mnist', train=False, download=True,
                        transform=serialize(
                            transforms.Compose,
                            [serialize(transforms.ToTensor), serialize(transforms.Normalize, (0.1307,), (0.3081,))]
                        ))
    dataloader = serialize(DataLoader, dataset, batch_size=10)
    x, y = next(iter(json_loads(json_dumps(dataloader))))
    assert x.size() == torch.Size([10, 1, 28, 28])
    assert y.size() == torch.Size([10])

    # Plain (unwrapped) transform objects should also survive the round trip.
    dataset = serialize(MNIST, root='data/mnist', train=False, download=True,
                        transform=transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]))
    dataloader = serialize(DataLoader, dataset, batch_size=10)
    x, y = next(iter(json_loads(json_dumps(dataloader))))
    assert x.size() == torch.Size([10, 1, 28, 28])
    assert y.size() == torch.Size([10])


def test_type():
    """Bare types and builtin callables serialize to a '__typename__' record."""
    assert json_dumps(torch.optim.Adam) == '{"__typename__": "torch.optim.adam.Adam"}'
    assert json_loads('{"__typename__": "torch.optim.adam.Adam"}') == torch.optim.Adam
    # The module prefix of a class defined in this file depends on how the
    # test is invoked, so only the suffix is pinned down.
    assert re.match(r'{"__typename__": "(.*)test_serializer.Foo"}', json_dumps(Foo))
    assert json_dumps(math.floor) == '{"__typename__": "math.floor"}'
    assert json_loads('{"__typename__": "math.floor"}') == math.floor


if __name__ == '__main__':
    # Allow running this file directly, outside of pytest.
    test_serialize()
    test_basic_unit()
    test_dataset()
    test_type()