# Copyright (c) Open-MMLab. All rights reserved.
import os
import os.path as osp
import tempfile

import cv2
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal, assert_array_equal

import mmcv


class TestImage(object):

    @classmethod
    def setup_class(cls):
        # the test img resolution is 400x300
        cls.img_path = osp.join(osp.dirname(__file__), 'data/color.jpg')
        cls.gray_img_path = osp.join(
            osp.dirname(__file__), 'data/grayscale.jpg')
        cls.img = cv2.imread(cls.img_path)
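        # the mean/std values are the commonly used ImageNet normalization
        # constants, given in RGB order (cf. test_imnormalize)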
        cls.mean = np.float32(np.array([123.675, 116.28, 103.53]))
        cls.std = np.float32(np.array([58.395, 57.12, 57.375]))

    def assert_img_equal(self, img, ref_img, ratio_thr=0.999):
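        # two images count as equal if at least `ratio_thr` of the pixels
        # differ by no more than 1, which tolerates e.g. JPEG re-encoding noise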
        assert img.shape == ref_img.shape
        assert img.dtype == ref_img.dtype
        area = ref_img.shape[0] * ref_img.shape[1]
        diff = np.abs(img.astype('int32') - ref_img.astype('int32'))
        assert np.sum(diff <= 1) / float(area) > ratio_thr

    def test_imread(self):
        # backend cv2
        mmcv.use_backend('cv2')

        img_cv2_color_bgr = mmcv.imread(self.img_path)
        assert img_cv2_color_bgr.shape == (300, 400, 3)
        img_cv2_color_rgb = mmcv.imread(self.img_path, channel_order='rgb')
        assert img_cv2_color_rgb.shape == (300, 400, 3)
        assert_array_equal(img_cv2_color_rgb[:, :, ::-1], img_cv2_color_bgr)
        img_cv2_grayscale1 = mmcv.imread(self.img_path, 'grayscale')
        assert img_cv2_grayscale1.shape == (300, 400)
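        # a grayscale file read with the default 'color' flag is expanded to
        # 3 channels, while 'unchanged' keeps the single channel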
        img_cv2_grayscale2 = mmcv.imread(self.gray_img_path)
        assert img_cv2_grayscale2.shape == (300, 400, 3)
        img_cv2_unchanged = mmcv.imread(self.gray_img_path, 'unchanged')
        assert img_cv2_unchanged.shape == (300, 400)
        img_cv2_unchanged = mmcv.imread(img_cv2_unchanged)
        assert_array_equal(img_cv2_unchanged, mmcv.imread(img_cv2_unchanged))
        with pytest.raises(TypeError):
            mmcv.imread(1)

        # backend turbojpeg
        mmcv.use_backend('turbojpeg')
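        # the turbojpeg backend should decode to exactly the same pixels as
        # the cv2 backend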

        img_turbojpeg_color_bgr = mmcv.imread(self.img_path)
        assert img_turbojpeg_color_bgr.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_color_bgr, img_cv2_color_bgr)

        img_turbojpeg_color_rgb = mmcv.imread(
            self.img_path, channel_order='rgb')
        assert img_turbojpeg_color_rgb.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_color_rgb, img_cv2_color_rgb)

        with pytest.raises(ValueError):
            mmcv.imread(self.img_path, channel_order='unsupport_order')

        img_turbojpeg_grayscale1 = mmcv.imread(self.img_path, flag='grayscale')
        assert img_turbojpeg_grayscale1.shape == (300, 400)
        assert_array_equal(img_turbojpeg_grayscale1, img_cv2_grayscale1)

        img_turbojpeg_grayscale2 = mmcv.imread(self.gray_img_path)
        assert img_turbojpeg_grayscale2.shape == (300, 400, 3)
        assert_array_equal(img_turbojpeg_grayscale2, img_cv2_grayscale2)

        img_turbojpeg_grayscale2 = mmcv.imread(img_turbojpeg_grayscale2)
        assert_array_equal(img_turbojpeg_grayscale2,
                           mmcv.imread(img_turbojpeg_grayscale2))

        with pytest.raises(ValueError):
            mmcv.imread(self.gray_img_path, 'unchanged')

        with pytest.raises(TypeError):
            mmcv.imread(1)

        with pytest.raises(AssertionError):
            mmcv.use_backend('unsupport_backend')

        mmcv.use_backend('cv2')

    def test_imfrombytes(self):
        # backend cv2
        mmcv.use_backend('cv2')
        with open(self.img_path, 'rb') as f:
            img_bytes = f.read()
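        # imfrombytes decodes an in-memory encoded image buffer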
        img_cv2 = mmcv.imfrombytes(img_bytes)
        assert img_cv2.shape == (300, 400, 3)

        # backend turbojpeg
        mmcv.use_backend('turbojpeg')
        with open(self.img_path, 'rb') as f:
            img_bytes = f.read()
        img_turbojpeg = mmcv.imfrombytes(img_bytes)
        assert img_turbojpeg.shape == (300, 400, 3)
        assert_array_equal(img_cv2, img_turbojpeg)

        mmcv.use_backend('cv2')

    def test_imwrite(self):
        img = mmcv.imread(self.img_path)
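        # write to a temporary file, read it back and compare within the
        # tolerance of assert_img_equal (JPEG encoding is lossy)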
        out_file = osp.join(tempfile.gettempdir(), 'mmcv_test.jpg')
        mmcv.imwrite(img, out_file)
        rewrite_img = mmcv.imread(out_file)
        os.remove(out_file)
        self.assert_img_equal(img, rewrite_img)

    def test_imnormalize(self):
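        # with the default to_rgb=True, imnormalize converts the BGR input to
        # RGB before applying (img - mean) / std, hence the RGB baseline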
        rgbimg = self.img[:, :, ::-1]
        baseline = (rgbimg - self.mean) / self.std
        img = mmcv.imnormalize(self.img, self.mean, self.std)
        assert np.allclose(img, baseline)
        img = mmcv.imnormalize(rgbimg, self.mean, self.std, to_rgb=False)
        assert np.allclose(img, baseline)

    def test_imdenormalize(self):
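        # imdenormalize is the inverse: img * std + mean, optionally followed
        # by an RGB -> BGR conversion (to_bgr=True by default)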
        normimg = (self.img[:, :, ::-1] - self.mean) / self.std
        rgbbaseline = (normimg * self.std + self.mean)
        bgrbaseline = rgbbaseline[:, :, ::-1]
        img = mmcv.imdenormalize(normimg, self.mean, self.std)
        assert np.allclose(img, bgrbaseline)
        img = mmcv.imdenormalize(normimg, self.mean, self.std, to_bgr=False)
        assert np.allclose(img, rgbbaseline)

    def test_bgr2gray(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2gray(in_img)
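        # reference uses the standard luma weights Y = 0.299 R + 0.587 G +
        # 0.114 B; the input is BGR, hence the reversed channel indices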
        computed_gray = (
            in_img[:, :, 0] * 0.114 + in_img[:, :, 1] * 0.587 +
            in_img[:, :, 2] * 0.299)
        assert_array_almost_equal(out_img, computed_gray, decimal=4)
        out_img_3d = mmcv.bgr2gray(in_img, True)
        assert out_img_3d.shape == (10, 10, 1)
        assert_array_almost_equal(out_img_3d[..., 0], out_img, decimal=4)

    def test_rgb2gray(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.rgb2gray(in_img)
        computed_gray = (
            in_img[:, :, 0] * 0.299 + in_img[:, :, 1] * 0.587 +
            in_img[:, :, 2] * 0.114)
        assert_array_almost_equal(out_img, computed_gray, decimal=4)
        out_img_3d = mmcv.rgb2gray(in_img, True)
        assert out_img_3d.shape == (10, 10, 1)
        assert_array_almost_equal(out_img_3d[..., 0], out_img, decimal=4)

    def test_gray2bgr(self):
        in_img = np.random.rand(10, 10).astype(np.float32)
        out_img = mmcv.gray2bgr(in_img)
        assert out_img.shape == (10, 10, 3)
        for i in range(3):
            assert_array_almost_equal(out_img[..., i], in_img, decimal=4)

    def test_gray2rgb(self):
        in_img = np.random.rand(10, 10).astype(np.float32)
        out_img = mmcv.gray2rgb(in_img)
        assert out_img.shape == (10, 10, 3)
        for i in range(3):
            assert_array_almost_equal(out_img[..., i], in_img, decimal=4)

    def test_bgr2rgb(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2rgb(in_img)
        assert out_img.shape == in_img.shape
        assert_array_equal(out_img[..., 0], in_img[..., 2])
        assert_array_equal(out_img[..., 1], in_img[..., 1])
        assert_array_equal(out_img[..., 2], in_img[..., 0])

    def test_rgb2bgr(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.rgb2bgr(in_img)
        assert out_img.shape == in_img.shape
        assert_array_equal(out_img[..., 0], in_img[..., 2])
        assert_array_equal(out_img[..., 1], in_img[..., 1])
        assert_array_equal(out_img[..., 2], in_img[..., 0])

    def test_bgr2hsv(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2hsv(in_img)
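        # the reference conversion yields H in [0, 360) and S, V in [0, 1],
        # matching OpenCV's convention for float32 images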
        argmax = in_img.argmax(axis=2)
        computed_hsv = np.empty_like(in_img, dtype=in_img.dtype)
        for i in range(in_img.shape[0]):
            for j in range(in_img.shape[1]):
                b = in_img[i, j, 0]
                g = in_img[i, j, 1]
                r = in_img[i, j, 2]
                v = max(r, g, b)
                s = (v - min(r, g, b)) / v if v != 0 else 0
                if argmax[i, j] == 0:
                    h = 240 + 60 * (r - g) / (v - min(r, g, b))
                elif argmax[i, j] == 1:
                    h = 120 + 60 * (b - r) / (v - min(r, g, b))
                else:
                    h = 60 * (g - b) / (v - min(r, g, b))
                if h < 0:
                    h += 360
                computed_hsv[i, j, :] = [h, s, v]
        assert_array_almost_equal(out_img, computed_hsv, decimal=2)

    def test_bgr2hls(self):
        in_img = np.random.rand(10, 10, 3).astype(np.float32)
        out_img = mmcv.bgr2hls(in_img)
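        # reference HLS conversion: H in [0, 360), L and S in [0, 1]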
        argmax = in_img.argmax(axis=2)
        computed_hls = np.empty_like(in_img, dtype=in_img.dtype)
        for i in range(in_img.shape[0]):
            for j in range(in_img.shape[1]):
                b = in_img[i, j, 0]
                g = in_img[i, j, 1]
                r = in_img[i, j, 2]
                maxc = max(r, g, b)
                minc = min(r, g, b)
                _l = (minc + maxc) / 2.0
                if minc == maxc:
                    h = 0.0
                    s = 0.0
                if _l <= 0.5:
                    s = (maxc - minc) / (maxc + minc)
                else:
                    s = (maxc - minc) / (2.0 - maxc - minc)
                if argmax[i, j] == 2:
                    h = 60 * (g - b) / (maxc - minc)
                elif argmax[i, j] == 1:
                    h = 60 * (2.0 + (b - r) / (maxc - minc))
                else:
                    h = 60 * (4.0 + (r - g) / (maxc - minc))
                if h < 0:
                    h += 360
                computed_hls[i, j, :] = [h, _l, s]
        assert_array_almost_equal(out_img, computed_hls, decimal=2)

    def test_imresize(self):
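        # imresize takes the target size as (width, height), so (1000, 600)
        # turns the 400x300 test image into an array of shape (600, 1000, 3)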
        resized_img = mmcv.imresize(self.img, (1000, 600))
        assert resized_img.shape == (600, 1000, 3)
        resized_img, w_scale, h_scale = mmcv.imresize(self.img, (1000, 600),
                                                      True)
        assert (resized_img.shape == (600, 1000, 3) and w_scale == 2.5
                and h_scale == 2.0)
        for mode in ['nearest', 'bilinear', 'bicubic', 'area', 'lanczos']:
            resized_img = mmcv.imresize(
                self.img, (1000, 600), interpolation=mode)
            assert resized_img.shape == (600, 1000, 3)

    def test_imresize_like(self):
        a = np.zeros((100, 200, 3))
        resized_img = mmcv.imresize_like(self.img, a)
        assert resized_img.shape == (100, 200, 3)

    def test_imrescale(self):
        # rescale by a certain factor
        resized_img = mmcv.imrescale(self.img, 1.5)
        assert resized_img.shape == (450, 600, 3)
        resized_img = mmcv.imrescale(self.img, 0.934)
        assert resized_img.shape == (280, 374, 3)

        # rescale by a certain max_size
        # resize (400, 300) to (max_1000, max_600)
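        # a tuple scale acts as a maximum (long edge, short edge) size: the
        # aspect ratio is kept and the image is scaled to fit inside it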
        resized_img = mmcv.imrescale(self.img, (1000, 600))
        assert resized_img.shape == (600, 800, 3)
        resized_img, scale = mmcv.imrescale(
            self.img, (1000, 600), return_scale=True)
        assert resized_img.shape == (600, 800, 3) and scale == 2.0
        # resize (400, 300) to (max_200, max_180)
        resized_img = mmcv.imrescale(self.img, (180, 200))
        assert resized_img.shape == (150, 200, 3)
        resized_img, scale = mmcv.imrescale(
            self.img, (180, 200), return_scale=True)
        assert resized_img.shape == (150, 200, 3) and scale == 0.5

        # test exceptions
        with pytest.raises(ValueError):
            mmcv.imrescale(self.img, -0.5)
        with pytest.raises(TypeError):
            mmcv.imrescale(self.img, [100, 100])

    def test_imflip(self):
        # test horizontal flip (color image)
        img = np.random.rand(80, 60, 3)
        h, w, c = img.shape
        flipped_img = mmcv.imflip(img)
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[i, w - 1 - j, k]
        # test vertical flip (color image)
        flipped_img = mmcv.imflip(img, direction='vertical')
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[h - 1 - i, j, k]
        # test horizontal flip (grayscale image)
        img = np.random.rand(80, 60)
        h, w = img.shape
        flipped_img = mmcv.imflip(img)
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[i, w - 1 - j]
        # test vertical flip (grayscale image)
        flipped_img = mmcv.imflip(img, direction='vertical')
        assert flipped_img.shape == img.shape
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[h - 1 - i, j]

    def test_imflip_(self):
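        # imflip_ is the in-place variant: it returns the very same array
        # object it was given (checked below via id())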
        # test horizontal flip (color image)
        img = np.random.rand(80, 60, 3)
        h, w, c = img.shape
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip)
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[i, w - 1 - j, k]
                    assert flipped_img[i, j, k] == img_for_flip[i, j, k]

        # test vertical flip (color image)
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip, direction='vertical')
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                for k in range(c):
                    assert flipped_img[i, j, k] == img[h - 1 - i, j, k]
                    assert flipped_img[i, j, k] == img_for_flip[i, j, k]

        # test horizontal flip (grayscale image)
        img = np.random.rand(80, 60)
        h, w = img.shape
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip)
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[i, w - 1 - j]
                assert flipped_img[i, j] == img_for_flip[i, j]

        # test vertical flip (grayscale image)
        img_for_flip = img.copy()
        flipped_img = mmcv.imflip_(img_for_flip, direction='vertical')
        assert flipped_img.shape == img.shape
        assert flipped_img.shape == img_for_flip.shape
        assert id(flipped_img) == id(img_for_flip)
        for i in range(h):
            for j in range(w):
                assert flipped_img[i, j] == img[h - 1 - i, j]
                assert flipped_img[i, j] == img_for_flip[i, j]

    def test_imcrop(self):
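        # bboxes are (x1, y1, x2, y2) with inclusive coordinates, so the
        # first bbox below crops a 100x100 patch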
        # yapf: disable
        bboxes = np.array([[100, 100, 199, 199],  # center
                           [0, 0, 150, 100],  # left-top corner
                           [250, 200, 399, 299],  # right-bottom corner
                           [0, 100, 399, 199],  # wide
                           [150, 0, 299, 299]])  # tall
        # yapf: enable

        # crop one bbox
        patch = mmcv.imcrop(self.img, bboxes[0, :])
        patches = mmcv.imcrop(self.img, bboxes[[0], :])
        assert patch.shape == (100, 100, 3)
        patch_path = osp.join(osp.dirname(__file__), 'data/patches')
        ref_patch = np.load(patch_path + '/0.npy')
        self.assert_img_equal(patch, ref_patch)
        assert isinstance(patches, list) and len(patches) == 1
        self.assert_img_equal(patches[0], ref_patch)

        # crop with no scaling and padding
        patches = mmcv.imcrop(self.img, bboxes)
        assert len(patches) == bboxes.shape[0]
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/{}.npy'.format(i))
            self.assert_img_equal(patches[i], ref_patch)

        # crop with scaling and no padding
        patches = mmcv.imcrop(self.img, bboxes, 1.2)
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/scale_{}.npy'.format(i))
            self.assert_img_equal(patches[i], ref_patch)

        # crop with scaling and padding
        patches = mmcv.imcrop(self.img, bboxes, 1.2, pad_fill=[255, 255, 0])
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/pad_{}.npy'.format(i))
            self.assert_img_equal(patches[i], ref_patch)
        patches = mmcv.imcrop(self.img, bboxes, 1.2, pad_fill=0)
        for i in range(len(patches)):
            ref_patch = np.load(patch_path + '/pad0_{}.npy'.format(i))
            self.assert_img_equal(patches[i], ref_patch)

    def test_impad(self):
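        # impad pads to an absolute target shape (h, w) or (h, w, c); the
        # original image stays in the top-left corner and pad_val fills the
        # rest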
        # grayscale image
        img = np.random.rand(10, 10).astype(np.float32)
        padded_img = mmcv.impad(img, (15, 12), 0)
        assert_array_equal(img, padded_img[:10, :10])
        assert_array_equal(
            np.zeros((5, 12), dtype='float32'), padded_img[10:, :])
        assert_array_equal(
            np.zeros((15, 2), dtype='float32'), padded_img[:, 10:])

        # RGB image
        img = np.random.rand(10, 10, 3).astype(np.float32)
        padded_img = mmcv.impad(img, (15, 12), 0)
        assert_array_equal(img, padded_img[:10, :10, :])
        assert_array_equal(
            np.zeros((5, 12, 3), dtype='float32'), padded_img[10:, :, :])
        assert_array_equal(
            np.zeros((15, 2, 3), dtype='float32'), padded_img[:, 10:, :])

        img = np.random.randint(256, size=(10, 10, 3)).astype('uint8')
        padded_img = mmcv.impad(img, (15, 12, 3), [100, 110, 120])
        assert_array_equal(img, padded_img[:10, :10, :])
        assert_array_equal(
            np.array([100, 110, 120], dtype='uint8') * np.ones(
                (5, 12, 3), dtype='uint8'), padded_img[10:, :, :])
        assert_array_equal(
            np.array([100, 110, 120], dtype='uint8') * np.ones(
                (15, 2, 3), dtype='uint8'), padded_img[:, 10:, :])

        with pytest.raises(AssertionError):
            mmcv.impad(img, (15, ), 0)
        with pytest.raises(AssertionError):
            mmcv.impad(img, (5, 5), 0)
        with pytest.raises(AssertionError):
            mmcv.impad(img, (5, 5), [0, 1])

    def test_impad_to_multiple(self):
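        # impad_to_multiple pads height and width up to the nearest multiple
        # of the given divisor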
        img = np.random.rand(11, 14, 3).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 4)
        assert padded_img.shape == (12, 16, 3)
        img = np.random.rand(20, 12).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 5)
        assert padded_img.shape == (20, 15)
        img = np.random.rand(20, 12).astype(np.float32)
        padded_img = mmcv.impad_to_multiple(img, 2)
        assert padded_img.shape == (20, 12)

    def test_imrotate(self):
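        # a positive angle rotates the image clockwise (in image coordinates);
        # out-of-bound areas are filled with border_value unless auto_bound
        # expands the canvas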
        img = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]).astype(np.uint8)
        assert_array_equal(mmcv.imrotate(img, 0), img)
        img_r = np.array([[7, 4, 1], [8, 5, 2], [9, 6, 3]])
        assert_array_equal(mmcv.imrotate(img, 90), img_r)
        img_r = np.array([[3, 6, 9], [2, 5, 8], [1, 4, 7]])
        assert_array_equal(mmcv.imrotate(img, -90), img_r)

        img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]]).astype(np.uint8)
        img_r = np.array([[0, 6, 2, 0], [0, 7, 3, 0]])
        assert_array_equal(mmcv.imrotate(img, 90), img_r)
        img_r = np.array([[1, 0, 0, 0], [2, 0, 0, 0]])
        assert_array_equal(mmcv.imrotate(img, 90, center=(0, 0)), img_r)
        img_r = np.array([[255, 6, 2, 255], [255, 7, 3, 255]])
        assert_array_equal(mmcv.imrotate(img, 90, border_value=255), img_r)
        img_r = np.array([[5, 1], [6, 2], [7, 3], [8, 4]])
        assert_array_equal(mmcv.imrotate(img, 90, auto_bound=True), img_r)

        with pytest.raises(ValueError):
            mmcv.imrotate(img, 90, center=(0, 0), auto_bound=True)

    def test_iminvert(self):
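        # iminvert maps each uint8 value v to 255 - v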
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[255, 127, 0], [254, 128, 1], [253, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.iminvert(img), img_r)

    def test_solarize(self):
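        # pixels greater than or equal to the threshold (128 by default) are
        # inverted; pixels below it are left untouched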
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[0, 127, 0], [1, 127, 1], [2, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.solarize(img), img_r)
        img_r = np.array([[0, 127, 0], [1, 128, 1], [2, 126, 2]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.solarize(img, 100), img_r)

    def test_posterize(self):
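        # posterize keeps only the given number of most significant bits per
        # channel and zeroes the rest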
        img = np.array([[0, 128, 255], [1, 127, 254], [2, 129, 253]],
                       dtype=np.uint8)
        img_r = np.array([[0, 128, 128], [0, 0, 128], [0, 128, 128]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.posterize(img, 1), img_r)
        img_r = np.array([[0, 128, 224], [0, 96, 224], [0, 128, 224]],
                         dtype=np.uint8)
        assert_array_equal(mmcv.posterize(img, 3), img_r)