# Copyright (c) Open-MMLab. All rights reserved.
import os
import os.path as osp
import tempfile
from pathlib import Path

import cv2
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal, assert_array_equal

import mmcv


class TestIO:

    @classmethod
    def setup_class(cls):
        # the test img resolution is 400x300
        cls.img_path = osp.join(osp.dirname(__file__), 'data/color.jpg')
        cls.img_path_obj = Path(cls.img_path)
        cls.gray_img_path = osp.join(
            osp.dirname(__file__), 'data/grayscale.jpg')
        cls.gray_img_path_obj = Path(cls.gray_img_path)
        cls.img = cv2.imread(cls.img_path)

    def assert_img_equal(self, img, ref_img, ratio_thr=0.999):
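        # JPEG encoding/decoding is slightly lossy, so instead of exact
        # equality we require that at least `ratio_thr` of the pixels differ
        # from the reference by no more than 1 intensity level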
        assert img.shape == ref_img.shape
        assert img.dtype == ref_img.dtype
        area = ref_img.shape[0] * ref_img.shape[1]
        diff = np.abs(img.astype('int32') - ref_img.astype('int32'))
        assert np.sum(diff <= 1) / float(area) > ratio_thr

    def test_imread(self):
        # backend cv2
        mmcv.use_backend('cv2')

        img_cv2_color_bgr = mmcv.imread(self.img_path)
        assert img_cv2_color_bgr.shape == (300, 400, 3)
        img_cv2_color_rgb = mmcv.imread(self.img_path, channel_order='rgb')
        assert img_cv2_color_rgb.shape == (300, 400, 3)
        assert_array_equal(img_cv2_color_rgb[:, :, ::-1], img_cv2_color_bgr)
        img_cv2_grayscale1 = mmcv.imread(self.img_path, 'grayscale')
        assert img_cv2_grayscale1.shape == (300, 400)
        img_cv2_grayscale2 = mmcv.imread(self.gray_img_path)
        assert img_cv2_grayscale2.shape == (300, 400, 3)
        img_cv2_unchanged = mmcv.imread(self.gray_img_path, 'unchanged')
        assert img_cv2_unchanged.shape == (300, 400)
        img_cv2_unchanged = mmcv.imread(img_cv2_unchanged)
        assert_array_equal(img_cv2_unchanged, mmcv.imread(img_cv2_unchanged))

        img_cv2_color_bgr = mmcv.imread(self.img_path_obj)
        assert img_cv2_color_bgr.shape == (300, 400, 3)
        img_cv2_color_rgb = mmcv.imread(self.img_path_obj, channel_order='rgb')
        assert img_cv2_color_rgb.shape == (300, 400, 3)
        assert_array_equal(img_cv2_color_rgb[:, :, ::-1], img_cv2_color_bgr)
        img_cv2_grayscale1 = mmcv.imread(self.img_path_obj, 'grayscale')
        assert img_cv2_grayscale1.shape == (300, 400)
        img_cv2_grayscale2 = mmcv.imread(self.gray_img_path_obj)
        assert img_cv2_grayscale2.shape == (300, 400, 3)
        img_cv2_unchanged = mmcv.imread(self.gray_img_path_obj, 'unchanged')
        assert img_cv2_unchanged.shape == (300, 400)
        with pytest.raises(TypeError):
            mmcv.imread(1)

        # backend turbojpeg
        mmcv.use_backend('turbojpeg')

        img_turbojpeg_color_bgr = mmcv.imread(self.img_path)
        assert img_turbojpeg_color_bgr.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_color_bgr, img_cv2_color_bgr)

        img_turbojpeg_color_rgb = mmcv.imread(
            self.img_path, channel_order='rgb')
        assert img_turbojpeg_color_rgb.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_color_rgb, img_cv2_color_rgb)

        with pytest.raises(ValueError):
            mmcv.imread(self.img_path, channel_order='unsupport_order')

        img_turbojpeg_grayscale1 = mmcv.imread(self.img_path, flag='grayscale')
        assert img_turbojpeg_grayscale1.shape == (300, 400)
        assert_array_equal(img_turbojpeg_grayscale1, img_cv2_grayscale1)

        img_turbojpeg_grayscale2 = mmcv.imread(self.gray_img_path)
        assert img_turbojpeg_grayscale2.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_grayscale2, img_cv2_grayscale2)

        img_turbojpeg_grayscale2 = mmcv.imread(img_turbojpeg_grayscale2)
        assert_array_equal(img_turbojpeg_grayscale2,
                           mmcv.imread(img_turbojpeg_grayscale2))

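        # the 'unchanged' flag is not supported by the turbojpeg backend, so
        # it is expected to raise a ValueError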
        with pytest.raises(ValueError):
            mmcv.imread(self.gray_img_path, 'unchanged')

        with pytest.raises(TypeError):
            mmcv.imread(1)

        with pytest.raises(AssertionError):
            mmcv.use_backend('unsupport_backend')

        mmcv.use_backend('cv2')

    def test_imfrombytes(self):
        # backend cv2
        mmcv.use_backend('cv2')
        with open(self.img_path, 'rb') as f:
            img_bytes = f.read()
        img_cv2 = mmcv.imfrombytes(img_bytes)
        assert img_cv2.shape == (300, 400, 3)

        # backend turbojpeg
        mmcv.use_backend('turbojpeg')
        with open(self.img_path, 'rb') as f:
            img_bytes = f.read()
        img_turbojpeg = mmcv.imfrombytes(img_bytes)
        assert img_turbojpeg.shape == (300, 400, 3)
        assert_array_equal(img_cv2, img_turbojpeg)

        mmcv.use_backend('cv2')

    def test_imwrite(self):
        img = mmcv.imread(self.img_path)
        out_file = osp.join(tempfile.gettempdir(), 'mmcv_test.jpg')
        mmcv.imwrite(img, out_file)
        rewrite_img = mmcv.imread(out_file)
        os.remove(out_file)
        self.assert_img_equal(img, rewrite_img)


class TestColorSpace:

    def test_bgr2gray(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2gray(in_img)
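        # the expected grayscale values use the BT.601 luma weights
        # (0.299 R + 0.587 G + 0.114 B); note that the input is BGR-ordered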
        computed_gray = (
            in_img[:, :, 0] * 0.114 + in_img[:, :, 1] * 0.587 +
            in_img[:, :, 2] * 0.299)
        assert_array_almost_equal(out_img, computed_gray, decimal=4)
        out_img_3d = mmcv.bgr2gray(in_img, True)
        assert out_img_3d.shape == (10, 10, 1)
        assert_array_almost_equal(out_img_3d[..., 0], out_img, decimal=4)

    def test_rgb2gray(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.rgb2gray(in_img)
        computed_gray = (
            in_img[:, :, 0] * 0.299 + in_img[:, :, 1] * 0.587 +
            in_img[:, :, 2] * 0.114)
        assert_array_almost_equal(out_img, computed_gray, decimal=4)
        out_img_3d = mmcv.rgb2gray(in_img, True)
        assert out_img_3d.shape == (10, 10, 1)
        assert_array_almost_equal(out_img_3d[..., 0], out_img, decimal=4)

    def test_gray2bgr(self):
        in_img = np.random.rand(10, 10).astype(np.float32)
        out_img = mmcv.gray2bgr(in_img)
        assert out_img.shape == (10, 10, 3)
        for i in range(3):
            assert_array_almost_equal(out_img[..., i], in_img, decimal=4)

    def test_gray2rgb(self):
        in_img = np.random.rand(10, 10).astype(np.float32)
        out_img = mmcv.gray2rgb(in_img)
        assert out_img.shape == (10, 10, 3)
        for i in range(3):
            assert_array_almost_equal(out_img[..., i], in_img, decimal=4)

    def test_bgr2rgb(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2rgb(in_img)
        assert out_img.shape == in_img.shape
        assert_array_equal(out_img[..., 0], in_img[..., 2])
        assert_array_equal(out_img[..., 1], in_img[..., 1])
        assert_array_equal(out_img[..., 2], in_img[..., 0])

    def test_rgb2bgr(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.rgb2bgr(in_img)
        assert out_img.shape == in_img.shape
        assert_array_equal(out_img[..., 0], in_img[..., 2])
        assert_array_equal(out_img[..., 1], in_img[..., 1])
        assert_array_equal(out_img[..., 2], in_img[..., 0])

    def test_bgr2hsv(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2hsv(in_img)
        argmax = in_img.argmax(axis=2)
        computed_hsv = np.empty_like(in_img, dtype=in_img.dtype)
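        # reference HSV values computed pixel by pixel: V = max(R, G, B),
        # S = (V - min) / V, and H (in degrees) depends on which channel is
        # the maximum; the input array is BGR-ordered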
        for i in range(in_img.shape[0]):
            for j in range(in_img.shape[1]):
                b = in_img[i, j, 0]
                g = in_img[i, j, 1]
                r = in_img[i, j, 2]
                v = max(r, g, b)
                s = (v - min(r, g, b)) / v if v != 0 else 0
                if argmax[i, j] == 0:
                    h = 240 + 60 * (r - g) / (v - min(r, g, b))
                elif argmax[i, j] == 1:
                    h = 120 + 60 * (b - r) / (v - min(r, g, b))
                else:
                    h = 60 * (g - b) / (v - min(r, g, b))
                if h < 0:
                    h += 360
                computed_hsv[i, j, :] = [h, s, v]
        assert_array_almost_equal(out_img, computed_hsv, decimal=2)

    def test_bgr2hls(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2hls(in_img)
        argmax = in_img.argmax(axis=2)
        computed_hls = np.empty_like(in_img, dtype=in_img.dtype)
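        # reference HLS values: L is the mean of the max and min channels,
        # S depends on whether L exceeds 0.5, and H uses the same hue formula
        # as HSV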
        for i in range(in_img.shape[0]):
            for j in range(in_img.shape[1]):
                b = in_img[i, j, 0]
                g = in_img[i, j, 1]
                r = in_img[i, j, 2]
                maxc = max(r, g, b)
                minc = min(r, g, b)
                _l = (minc + maxc) / 2.0
                if minc == maxc:
                    h = 0.0
                    s = 0.0
                if _l <= 0.5:
                    s = (maxc - minc) / (maxc + minc)
                else:
                    s = (maxc - minc) / (2.0 - maxc - minc)
                if argmax[i, j] == 2:
                    h = 60 * (g - b) / (maxc - minc)
                elif argmax[i, j] == 1:
                    h = 60 * (2.0 + (b - r) / (maxc - minc))
                else:
                    h = 60 * (4.0 + (r - g) / (maxc - minc))
                if h < 0:
                    h += 360
                computed_hls[i, j, :] = [h, _l, s]
        assert_array_almost_equal(out_img, computed_hls, decimal=2)

    @pytest.mark.parametrize('src,dst,ref',
                             [('bgr', 'gray', cv2.COLOR_BGR2GRAY),
                              ('rgb', 'gray', cv2.COLOR_RGB2GRAY),
                              ('bgr', 'rgb', cv2.COLOR_BGR2RGB),
                              ('rgb', 'bgr', cv2.COLOR_RGB2BGR),
                              ('bgr', 'hsv', cv2.COLOR_BGR2HSV),
                              ('hsv', 'bgr', cv2.COLOR_HSV2BGR),
                              ('bgr', 'hls', cv2.COLOR_BGR2HLS),
                              ('hls', 'bgr', cv2.COLOR_HLS2BGR)])
    def test_imconvert(self, src, dst, ref):
        img = np.random.rand(10, 10, 3).astype(np.float32)
        assert_array_equal(
            mmcv.imconvert(img, src, dst), cv2.cvtColor(img, ref))


class TestGeometric:

    @classmethod
    def setup_class(cls):
        # the test img resolution is 400x300
        cls.img_path = osp.join(osp.dirname(__file__), 'data/color.jpg')
        cls.img = cv2.imread(cls.img_path)

    def test_imresize(self):
        resized_img = mmcv.imresize(self.img, (1000, 600))
        assert resized_img.shape == (600, 1000, 3)
        resized_img, w_scale, h_scale = mmcv.imresize(self.img, (1000, 600),
                                                      True)
        assert (resized_img.shape == (600, 1000, 3) and w_scale == 2.5
                and h_scale == 2.0)
        resized_img_dst = np.empty((600, 1000, 3), dtype=self.img.dtype)
        resized_img = mmcv.imresize(self.img, (1000, 600), out=resized_img_dst)
        assert id(resized_img_dst) == id(resized_img)
        assert_array_equal(resized_img_dst,
                           mmcv.imresize(self.img, (1000, 600)))
        for mode in ['nearest', 'bilinear', 'bicubic', 'area', 'lanczos']:
            resized_img = mmcv.imresize(
                self.img, (1000, 600), interpolation=mode)
            assert resized_img.shape == (600, 1000, 3)

    def test_imresize_like(self):
        a = np.zeros((100, 200, 3))
        resized_img = mmcv.imresize_like(self.img, a)
        assert resized_img.shape == (100, 200, 3)

    def test_rescale_size(self):
        new_size, scale_factor = mmcv.rescale_size((400, 300), 1.5, True)
        assert new_size == (600, 450) and scale_factor == 1.5
        new_size, scale_factor = mmcv.rescale_size((400, 300), 0.934, True)
        assert new_size == (374, 280) and scale_factor == 0.934

        new_size = mmcv.rescale_size((400, 300), 1.5)
        assert new_size == (600, 450)
        new_size = mmcv.rescale_size((400, 300), 0.934)
        assert new_size == (374, 280)

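        # a tuple scale acts as a max-size constraint: the larger value bounds
        # the long edge and the smaller value bounds the short edge, keeping
        # the aspect ratio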
        new_size, scale_factor = mmcv.rescale_size((400, 300), (1000, 600),
                                                   True)
        assert new_size == (800, 600) and scale_factor == 2.0
        new_size, scale_factor = mmcv.rescale_size((400, 300), (180, 200),
                                                   True)
        assert new_size == (200, 150) and scale_factor == 0.5

        new_size = mmcv.rescale_size((400, 300), (1000, 600))
        assert new_size == (800, 600)
        new_size = mmcv.rescale_size((400, 300), (180, 200))
        assert new_size == (200, 150)

        with pytest.raises(ValueError):
            mmcv.rescale_size((400, 300), -0.5)
        with pytest.raises(TypeError):
            mmcv.rescale_size((400, 300), [100, 100])

    def test_imrescale(self):
        # rescale by a certain factor
        resized_img = mmcv.imrescale(self.img, 1.5)
        assert resized_img.shape == (450, 600, 3)
        resized_img = mmcv.imrescale(self.img, 0.934)
        assert resized_img.shape == (280, 374, 3)

        # rescale by a certain max_size
        # resize (400, 300) to (max_1000, max_600)
        resized_img = mmcv.imrescale(self.img, (1000, 600))
        assert resized_img.shape == (600, 800, 3)
        resized_img, scale = mmcv.imrescale(
            self.img, (1000, 600), return_scale=True)
        assert resized_img.shape == (600, 800, 3) and scale == 2.0
        # resize (400, 300) to (max_200, max_180)
        resized_img = mmcv.imrescale(self.img, (180, 200))
        assert resized_img.shape == (150, 200, 3)
        resized_img, scale = mmcv.imrescale(
            self.img, (180, 200), return_scale=True)
        assert resized_img.shape == (150, 200, 3) and scale == 0.5

        # test exceptions
        with pytest.raises(ValueError):
            mmcv.imrescale(self.img, -0.5)
        with pytest.raises(TypeError):
            mmcv.imrescale(self.img, [100, 100])

    def test_imflip(self):
        # test horizontal flip (color image)
        img = np.random.rand(80, 60, 3)
        h, w, c = img.shape
        flipped_img = mmcv.imflip(img)
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[i, w - 1 - j, k]
        # test vertical flip (color image)
        flipped_img = mmcv.imflip(img, direction='vertical')
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[h - 1 - i, j, k]
        # test horizontal flip (grayscale image)
        img = np.random.rand(80, 60)
        h, w = img.shape
        flipped_img = mmcv.imflip(img)
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[i, w - 1 - j]
        # test vertical flip (grayscale image)
        flipped_img = mmcv.imflip(img, direction='vertical')
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[h - 1 - i, j]

    def test_imflip_(self):
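        # imflip_ is the in-place variant: it returns the same array object
        # that was passed in, with its contents flipped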
        # test horizontal flip (color image)
        img = np.random.rand(80, 60, 3)
        h, w, c = img.shape
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip)
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[i, w - 1 - j, k]
                    assert flipped_img[i, j, k] == img_for_flip[i, j, k]

        # test vertical flip (color image)
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip, direction='vertical')
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[h - 1 - i, j, k]
                    assert flipped_img[i, j, k] == img_for_flip[i, j, k]

        # test horizontal flip (grayscale image)
        img = np.random.rand(80, 60)
        h, w = img.shape
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip)
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[i, w - 1 - j]
                assert flipped_img[i, j] == img_for_flip[i, j]

        # test vertical flip (grayscale image)
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip, direction='vertical')
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[h - 1 - i, j]
                assert flipped_img[i, j] == img_for_flip[i, j]

    def test_imcrop(self):
        # yapf: disable
        bboxes = np.array([[100, 100, 199, 199],  # center
                           [0, 0, 150, 100],  # left-top corner
                           [250, 200, 399, 299],  # right-bottom corner
                           [0, 100, 399, 199],  # wide
                           [150, 0, 299, 299]])  # tall
        # yapf: enable

        # crop one bbox
        patch = mmcv.imcrop(self.img, bboxes[0, :])
        patches = mmcv.imcrop(self.img, bboxes[[0], :])
        assert patch.shape == (100, 100, 3)
        patch_path = osp.join(osp.dirname(__file__), 'data/patches')
        ref_patch = np.load(patch_path + '/0.npy')
        assert_array_equal(patch, ref_patch)
        assert isinstance(patches, list) and len(patches) == 1
        assert_array_equal(patches[0], ref_patch)

        # crop with no scaling and padding
        patches = mmcv.imcrop(self.img, bboxes)
        assert len(patches) == bboxes.shape[0]
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/{}.npy'.format(i))
            assert_array_equal(patches[i], ref_patch)

        # crop with scaling and no padding
        patches = mmcv.imcrop(self.img, bboxes, 1.2)
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/scale_{}.npy'.format(i))
            assert_array_equal(patches[i], ref_patch)

        # crop with scaling and padding
        patches = mmcv.imcrop(self.img, bboxes, 1.2, pad_fill=[255, 255, 0])
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/pad_{}.npy'.format(i))
            assert_array_equal(patches[i], ref_patch)
        patches = mmcv.imcrop(self.img, bboxes, 1.2, pad_fill=0)
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/pad0_{}.npy'.format(i))
            assert_array_equal(patches[i], ref_patch)

    def test_impad(self):
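        # impad keeps the original content in the top-left corner and fills
        # the extra bottom/right area with the given pad value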
        # grayscale image
        img = np.random.rand(10, 10).astype(np.float32)
        padded_img = mmcv.impad(img, (15, 12), 0)
        assert_array_equal(img, padded_img[:10, :10])
        assert_array_equal(
            np.zeros((5, 12), dtype='float32'), padded_img[10:, :])
        assert_array_equal(
            np.zeros((15, 2), dtype='float32'), padded_img[:, 10:])

        # RGB image
        img = np.random.rand(10, 10, 3).astype(np.float32)
        padded_img = mmcv.impad(img, (15, 12), 0)
        assert_array_equal(img, padded_img[:10, :10, :])
        assert_array_equal(
            np.zeros((5, 12, 3), dtype='float32'), padded_img[10:, :, :])
        assert_array_equal(
            np.zeros((15, 2, 3), dtype='float32'), padded_img[:, 10:, :])

        img = np.random.randint(256, size=(10, 10, 3)).astype('uint8')
        padded_img = mmcv.impad(img, (15, 12, 3), [100, 110, 120])
        assert_array_equal(img, padded_img[:10, :10, :])
        assert_array_equal(
            np.array([100, 110, 120], dtype='uint8') * np.ones(
                (5, 12, 3), dtype='uint8'), padded_img[10:, :, :])
        assert_array_equal(
            np.array([100, 110, 120], dtype='uint8') * np.ones(
                (15, 2, 3), dtype='uint8'), padded_img[:, 10:, :])

        with pytest.raises(AssertionError):
            mmcv.impad(img, (15, ), 0)
        with pytest.raises(AssertionError):
            mmcv.impad(img, (5, 5), 0)
        with pytest.raises(AssertionError):
            mmcv.impad(img, (5, 5), [0, 1])

    def test_impad_to_multiple(self):
        img = np.random.rand(11, 14, 3).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 4)
        assert padded_img.shape == (12, 16, 3)
        img = np.random.rand(20, 12).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 5)
        assert padded_img.shape == (20, 15)
        img = np.random.rand(20, 12).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 2)
        assert padded_img.shape == (20, 12)

    def test_imrotate(self):
        img = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]).astype(np.uint8)
        assert_array_equal(mmcv.imrotate(img, 0), img)
        img_r = np.array([[7, 4, 1], [8, 5, 2], [9, 6, 3]])
        assert_array_equal(mmcv.imrotate(img, 90), img_r)
        img_r = np.array([[3, 6, 9], [2, 5, 8], [1, 4, 7]])
        assert_array_equal(mmcv.imrotate(img, -90), img_r)

        img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]]).astype(np.uint8)
        img_r = np.array([[0, 6, 2, 0], [0, 7, 3, 0]])
        assert_array_equal(mmcv.imrotate(img, 90), img_r)
        img_r = np.array([[1, 0, 0, 0], [2, 0, 0, 0]])
        assert_array_equal(mmcv.imrotate(img, 90, center=(0, 0)), img_r)
        img_r = np.array([[255, 6, 2, 255], [255, 7, 3, 255]])
        assert_array_equal(mmcv.imrotate(img, 90, border_value=255), img_r)
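        # with auto_bound=True the output canvas is enlarged so that the whole
        # rotated image is kept (a 2x4 image becomes 4x2 after a 90 deg turn)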
        img_r = np.array([[5, 1], [6, 2], [7, 3], [8, 4]])
        assert_array_equal(mmcv.imrotate(img, 90, auto_bound=True), img_r)

        with pytest.raises(ValueError):
            mmcv.imrotate(img, 90, center=(0, 0), auto_bound=True)


class TestPhotometric:

    @classmethod
    def setup_class(cls):
        # the test img resolution is 400x300
        cls.img_path = osp.join(osp.dirname(__file__), 'data/color.jpg')
        cls.img = cv2.imread(cls.img_path)
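        # normalization statistics are given in RGB order (the commonly used
        # ImageNet mean/std)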
        cls.mean = np.array([123.675, 116.28, 103.53], dtype=np.float32)
        cls.std = np.array([58.395, 57.12, 57.375], dtype=np.float32)

    def test_imnormalize(self):
        rgb_img = self.img[:, :, ::-1]
        baseline = (rgb_img - self.mean) / self.std
        img = mmcv.imnormalize(self.img, self.mean, self.std)
        assert np.allclose(img, baseline)
        assert id(img) != id(self.img)
        img = mmcv.imnormalize(rgb_img, self.mean, self.std, to_rgb=False)
        assert np.allclose(img, baseline)
        assert id(img) != id(rgb_img)

    def test_imnormalize_(self):
        img_for_normalize = np.float32(self.img)
        rgb_img_for_normalize = np.float32(self.img[:, :, ::-1])
        baseline = (rgb_img_for_normalize - self.mean) / self.std
        img = mmcv.imnormalize_(img_for_normalize, self.mean, self.std)
        assert np.allclose(img_for_normalize, baseline)
        assert id(img) == id(img_for_normalize)
        img = mmcv.imnormalize_(
            rgb_img_for_normalize, self.mean, self.std, to_rgb=False)
        assert np.allclose(img, baseline)
        assert id(img) == id(rgb_img_for_normalize)

    def test_imdenormalize(self):
        norm_img = (self.img[:, :, ::-1] - self.mean) / self.std
        rgb_baseline = (norm_img * self.std + self.mean)
        bgr_baseline = rgb_baseline[:, :, ::-1]
        img = mmcv.imdenormalize(norm_img, self.mean, self.std)
        assert np.allclose(img, bgr_baseline)
        img = mmcv.imdenormalize(norm_img, self.mean, self.std, to_bgr=False)
        assert np.allclose(img, rgb_baseline)

    def test_iminvert(self):
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[255, 127, 0], [254, 128, 1], [253, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.iminvert(img), img_r)

    def test_solarize(self):
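        # solarize inverts every value at or above the threshold (default 128)
        # and leaves values below it untouched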
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[0, 127, 0], [1, 127, 1], [2, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.solarize(img), img_r)
        img_r = np.array([[0, 127, 0], [1, 128, 1], [2, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.solarize(img, 100), img_r)

    def test_posterize(self):
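        # posterize keeps only the requested number of most significant bits
        # per channel and zeroes the rest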
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[0, 128, 128], [0, 0, 128], [0, 128, 128]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.posterize(img, 1), img_r)
        img_r = np.array([[0, 128, 224], [0, 96, 224], [0, 128, 224]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.posterize(img, 3), img_r)