# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.


"""
Sanity checks for output images from the renderer.
"""
import unittest
from pathlib import Path
9
10

import numpy as np
facebook-github-bot's avatar
facebook-github-bot committed
11
import torch
Nikhila Ravi's avatar
Nikhila Ravi committed
12
from common_testing import TestCaseMixin, load_rgb_image
facebook-github-bot's avatar
facebook-github-bot committed
13
from PIL import Image
14
from pytorch3d.io import load_objs_as_meshes
15
from pytorch3d.renderer.cameras import OpenGLPerspectiveCameras, look_at_view_transform
facebook-github-bot's avatar
facebook-github-bot committed
16
17
from pytorch3d.renderer.lighting import PointLights
from pytorch3d.renderer.materials import Materials
18
from pytorch3d.renderer.mesh.rasterizer import MeshRasterizer, RasterizationSettings
facebook-github-bot's avatar
facebook-github-bot committed
19
20
21
from pytorch3d.renderer.mesh.renderer import MeshRenderer
from pytorch3d.renderer.mesh.shader import (
    BlendParams,
22
    HardFlatShader,
Patrick Labatut's avatar
Patrick Labatut committed
23
    HardGouraudShader,
24
25
26
    HardPhongShader,
    SoftSilhouetteShader,
    TexturedSoftPhongShader,
facebook-github-bot's avatar
facebook-github-bot committed
27
28
)
from pytorch3d.renderer.mesh.texturing import Textures
29
from pytorch3d.structures.meshes import Meshes, join_mesh
facebook-github-bot's avatar
facebook-github-bot committed
30
31
from pytorch3d.utils.ico_sphere import ico_sphere

32

Nikhila Ravi's avatar
Nikhila Ravi committed
33
# If DEBUG=True, save out images generated in the tests for debugging.
facebook-github-bot's avatar
facebook-github-bot committed
34
35
36
37
38
# All saved images have prefix DEBUG_
DEBUG = False
DATA_DIR = Path(__file__).resolve().parent / "data"


Nikhila Ravi's avatar
Nikhila Ravi committed
39
class TestRenderMeshes(TestCaseMixin, unittest.TestCase):
facebook-github-bot's avatar
facebook-github-bot committed
40
41
    def test_simple_sphere(self, elevated_camera=False):
        """
        Test output of phong and gouraud shading matches a reference image using
        the default values for the light sources.

        Renders a white vertex-textured ico sphere with the hard Phong, Gouraud
        and flat shaders, comparing each render against a saved reference image,
        then re-renders with Phong after moving the light behind the sphere and
        compares against the "dark" reference image.

        Args:
            elevated_camera: Defines whether the camera observing the scene should
                           have an elevation of 45 degrees.
        """
        device = torch.device("cuda:0")

        # Init mesh: a level-5 ico sphere with all-white (1.0) vertex colors.
        sphere_mesh = ico_sphere(5, device)
        verts_padded = sphere_mesh.verts_padded()
        faces_padded = sphere_mesh.faces_padded()
        textures = Textures(verts_rgb=torch.ones_like(verts_padded))
        sphere_mesh = Meshes(verts=verts_padded, faces=faces_padded, textures=textures)

        # Init rasterizer settings. `postfix` selects which reference image
        # on disk each render is compared against.
        if elevated_camera:
            # Elevated and rotated camera
            R, T = look_at_view_transform(dist=2.7, elev=45.0, azim=45.0)
            postfix = "_elevated_camera"
            # If y axis is up, the spot of light should
            # be on the bottom left of the sphere.
        else:
            # No elevation or azimuth rotation
            R, T = look_at_view_transform(2.7, 0.0, 0.0)
            postfix = ""
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)

        # Init shader settings
        materials = Materials(device=device)
        lights = PointLights(device=device)
        lights.location = torch.tensor([0.0, 0.0, +2.0], device=device)[None]

        # No blur: exactly one face per pixel (hard rasterization).
        raster_settings = RasterizationSettings(
            image_size=512, blur_radius=0.0, faces_per_pixel=1
        )
        rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)
        # Positional args are (sigma, gamma, background_color).
        blend_params = BlendParams(1e-4, 1e-4, (0, 0, 0))

        # Test several shaders. Each entry maps to a reference image named
        # test_simple_sphere_light_<name><postfix>.png in DATA_DIR.
        shaders = {
            "phong": HardPhongShader,
            "gouraud": HardGouraudShader,
            "flat": HardFlatShader,
        }
        for (name, shader_init) in shaders.items():
            shader = shader_init(
                lights=lights,
                cameras=cameras,
                materials=materials,
                blend_params=blend_params,
            )
            renderer = MeshRenderer(rasterizer=rasterizer, shader=shader)
            images = renderer(sphere_mesh)
            filename = "simple_sphere_light_%s%s.png" % (name, postfix)
            image_ref = load_rgb_image("test_%s" % filename, DATA_DIR)
            # Drop the alpha channel and move to CPU for comparison.
            rgb = images[0, ..., :3].squeeze().cpu()
            if DEBUG:
                filename = "DEBUG_%s" % filename
                Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                    DATA_DIR / filename
                )
            self.assertClose(rgb, image_ref, atol=0.05)

        ########################################################
        # Move the light along the z axis in world space so it is
        # behind the sphere; the Phong render should now be dark.
        # NOTE(review): the original comment claimed "+z axis" and
        # "+Z is in, +Y up, +X left" while the code sets z = -2.0;
        # one of the two is stale — confirm against the world/camera
        # axis convention before relying on this comment.
        ########################################################
        lights.location[..., 2] = -2.0
        phong_shader = HardPhongShader(
            lights=lights,
            cameras=cameras,
            materials=materials,
            blend_params=blend_params,
        )
        phong_renderer = MeshRenderer(rasterizer=rasterizer, shader=phong_shader)
        images = phong_renderer(sphere_mesh, lights=lights)
        rgb = images[0, ..., :3].squeeze().cpu()
        if DEBUG:
            filename = "DEBUG_simple_sphere_dark%s.png" % postfix
            Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                DATA_DIR / filename
            )

        # Load reference image
        image_ref_phong_dark = load_rgb_image(
            "test_simple_sphere_dark%s.png" % postfix, DATA_DIR
        )
        self.assertClose(rgb, image_ref_phong_dark, atol=0.05)
facebook-github-bot's avatar
facebook-github-bot committed
133
134
135

    def test_simple_sphere_elevated_camera(self):
        """
        Test output of phong and gouraud shading matches a reference image using
        the default values for the light sources, with the observing camera
        placed at a 45 degree elevation instead of level with the sphere.
        """
        # Delegate to the base sphere test with the elevated-camera flag set;
        # it selects the "_elevated_camera" reference images internally.
        self.test_simple_sphere(elevated_camera=True)

    def test_simple_sphere_batched(self):
        """
        Test a mesh with vertex textures can be extended to form a batch, and
        is rendered correctly with Phong, Gouraud and Flat Shaders.

        Every batch element is an identical white ico sphere, so each rendered
        image is compared against the same single-sphere reference image.
        """
        batch_size = 20
        device = torch.device("cuda:0")

        # Build a batch of identical white spheres with vertex textures.
        base_batch = ico_sphere(5, device).extend(batch_size)
        batch_verts = base_batch.verts_padded()
        batch_faces = base_batch.faces_padded()
        white_texture = Textures(verts_rgb=torch.ones_like(batch_verts))
        mesh_batch = Meshes(
            verts=batch_verts, faces=batch_faces, textures=white_texture
        )

        # One camera per batch element, all at the same frontal viewpoint.
        dist = torch.tensor([2.7]).repeat(batch_size).to(device)
        elev = torch.zeros_like(dist)
        azim = torch.zeros_like(dist)
        R, T = look_at_view_transform(dist, elev, azim)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)
        raster_settings = RasterizationSettings(
            image_size=512, blur_radius=0.0, faces_per_pixel=1
        )

        # Shader settings shared by every shader under test.
        materials = Materials(device=device)
        lights = PointLights(device=device)
        lights.location = torch.tensor([0.0, 0.0, +2.0], device=device)[None]
        blend_params = BlendParams(1e-4, 1e-4, (0, 0, 0))

        rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)
        shader_classes = {
            "phong": HardPhongShader,
            "gouraud": HardGouraudShader,
            "flat": HardFlatShader,
        }
        for shader_name, shader_cls in shader_classes.items():
            renderer = MeshRenderer(
                rasterizer=rasterizer,
                shader=shader_cls(
                    lights=lights,
                    cameras=cameras,
                    materials=materials,
                    blend_params=blend_params,
                ),
            )
            images = renderer(mesh_batch)
            image_ref = load_rgb_image(
                "test_simple_sphere_light_%s.png" % shader_name, DATA_DIR
            )
            # Every element of the batch must match the reference image.
            for idx in range(batch_size):
                rendered_rgb = images[idx, ..., :3].squeeze().cpu()
                self.assertClose(rendered_rgb, image_ref, atol=0.05)
facebook-github-bot's avatar
facebook-github-bot committed
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217

    def test_silhouette_with_grad(self):
        """
        Test silhouette blending. Also check that gradient calculation works.
        """
        device = torch.device("cuda:0")
        ref_path = DATA_DIR / "test_silhouette.png"

        # A single untextured ico sphere is enough for a silhouette render.
        ico = ico_sphere(5, device)
        verts, faces = ico.get_mesh_verts_faces(0)
        sphere_mesh = Meshes(verts=[verts], faces=[faces])

        # Soft rasterization: blur radius derived from sigma, many faces
        # per pixel so the silhouette edge is blended.
        blend_params = BlendParams(sigma=1e-4, gamma=1e-4)
        raster_settings = RasterizationSettings(
            image_size=512,
            blur_radius=np.log(1.0 / 1e-4 - 1.0) * blend_params.sigma,
            faces_per_pixel=80,
        )

        # Frontal camera at distance 2.7.
        R, T = look_at_view_transform(2.7, 0, 0)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)

        renderer = MeshRenderer(
            rasterizer=MeshRasterizer(cameras=cameras, raster_settings=raster_settings),
            shader=SoftSilhouetteShader(blend_params=blend_params),
        )
        images = renderer(sphere_mesh)
        # Only the alpha channel matters for the silhouette comparison.
        alpha = images[0, ..., 3].squeeze().cpu()
        if DEBUG:
            alpha_bytes = (alpha.numpy() * 255).astype(np.uint8)
            Image.fromarray(alpha_bytes).save(DATA_DIR / "DEBUG_silhouette.png")

        # Compare against the stored reference silhouette, rescaled to [0, 1].
        with Image.open(ref_path) as raw_image_ref:
            image_ref = torch.from_numpy(np.array(raw_image_ref))
        image_ref = image_ref.to(dtype=torch.float32) / 255.0
        self.assertClose(alpha, image_ref, atol=0.055)

        # Rebuild the mesh from leaf verts and check gradients flow back.
        verts.requires_grad = True
        sphere_mesh = Meshes(verts=[verts], faces=[faces])
        images = renderer(sphere_mesh)
        images[0, ...].sum().backward()
        self.assertIsNotNone(verts.grad)

    def test_texture_map(self):
        """
        Test a mesh with a texture map is loaded and rendered correctly.
        The pupils in the eyes of the cow should always be looking to the left.

        Three phases, each run with both rasterizer bin sizes (naive and
        coarse-to-fine) and compared against its own reference image:
        back of the cow, front of the cow, and a blurred soft render.
        Also verifies gradients flow back to the mesh vertices.
        """
        device = torch.device("cuda:0")
        obj_dir = Path(__file__).resolve().parent.parent / "docs/tutorials/data"
        obj_filename = obj_dir / "cow_mesh/cow.obj"

        # Load mesh + texture
        mesh = load_objs_as_meshes([obj_filename], device=device)

        # Init rasterizer settings: hard rasterization, one face per pixel.
        R, T = look_at_view_transform(2.7, 0, 0)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)
        raster_settings = RasterizationSettings(
            image_size=512, blur_radius=0.0, faces_per_pixel=1
        )

        # Init shader settings
        materials = Materials(device=device)
        lights = PointLights(device=device)

        # Place light behind the cow in world space. The front of
        # the cow is facing the -z direction.
        lights.location = torch.tensor([0.0, 0.0, 2.0], device=device)[None]

        # Init renderer
        renderer = MeshRenderer(
            rasterizer=MeshRasterizer(cameras=cameras, raster_settings=raster_settings),
            shader=TexturedSoftPhongShader(
                lights=lights, cameras=cameras, materials=materials
            ),
        )

        # Load reference image
        image_ref = load_rgb_image("test_texture_map_back.png", DATA_DIR)

        for bin_size in [0, None]:
            # Check both naive and coarse to fine produce the same output.
            # NOTE: this mutates the raster settings shared by all phases.
            renderer.rasterizer.raster_settings.bin_size = bin_size
            images = renderer(mesh)
            rgb = images[0, ..., :3].squeeze().cpu()

            if DEBUG:
                Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                    DATA_DIR / "DEBUG_texture_map_back.png"
                )

            # NOTE some pixels can be flaky and will not lead to
            # `cond1` being true. Add `cond2` and check `cond1 or cond2`
            cond1 = torch.allclose(rgb, image_ref, atol=0.05)
            cond2 = ((rgb - image_ref).abs() > 0.05).sum() < 5
            self.assertTrue(cond1 or cond2)

        # Check grad exists: rebuild the mesh around leaf verts so that
        # backward() populates verts.grad.
        [verts] = mesh.verts_list()
        verts.requires_grad = True
        mesh2 = Meshes(verts=[verts], faces=mesh.faces_list(), textures=mesh.textures)
        images = renderer(mesh2)
        images[0, ...].sum().backward()
        self.assertIsNotNone(verts.grad)

        ##########################################
        # Check rendering of the front of the cow
        ##########################################

        # Rotate the camera 180 degrees in azimuth to face the front.
        R, T = look_at_view_transform(2.7, 0, 180)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)

        # Move light to the front of the cow in world space
        lights.location = torch.tensor([0.0, 0.0, -2.0], device=device)[None]

        # Load reference image
        image_ref = load_rgb_image("test_texture_map_front.png", DATA_DIR)

        for bin_size in [0, None]:
            # Check both naive and coarse to fine produce the same output.
            renderer.rasterizer.raster_settings.bin_size = bin_size

            # Cameras/lights are overridden per call rather than rebuilt.
            images = renderer(mesh, cameras=cameras, lights=lights)
            rgb = images[0, ..., :3].squeeze().cpu()

            if DEBUG:
                Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                    DATA_DIR / "DEBUG_texture_map_front.png"
                )

            # NOTE some pixels can be flaky and will not lead to
            # `cond1` being true. Add `cond2` and check `cond1 or cond2`
            cond1 = torch.allclose(rgb, image_ref, atol=0.05)
            cond2 = ((rgb - image_ref).abs() > 0.05).sum() < 5
            self.assertTrue(cond1 or cond2)

        #################################
        # Add blurring to rasterization
        #################################
        R, T = look_at_view_transform(2.7, 0, 180)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)
        # Soft rasterization: larger sigma, blur radius derived from it,
        # many faces per pixel so edges blend.
        blend_params = BlendParams(sigma=5e-4, gamma=1e-4)
        raster_settings = RasterizationSettings(
            image_size=512,
            blur_radius=np.log(1.0 / 1e-4 - 1.0) * blend_params.sigma,
            faces_per_pixel=100,
        )

        # Load reference image
        image_ref = load_rgb_image("test_blurry_textured_rendering.png", DATA_DIR)

        for bin_size in [0, None]:
            # Check both naive and coarse to fine produce the same output.
            renderer.rasterizer.raster_settings.bin_size = bin_size

            images = renderer(
                mesh.clone(),
                cameras=cameras,
                raster_settings=raster_settings,
                blend_params=blend_params,
            )
            rgb = images[0, ..., :3].squeeze().cpu()

            if DEBUG:
                Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                    DATA_DIR / "DEBUG_blurry_textured_rendering.png"
                )

            self.assertClose(rgb, image_ref, atol=0.05)
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438

    def test_joined_spheres(self):
        """
        Test a list of Meshes can be joined as a single mesh and
        the single mesh is rendered correctly with Phong, Gouraud
        and Flat Shaders.

        Two ico spheres of different subdivision levels are scaled and
        offset so they do not overlap, joined into one mesh, given white
        vertex textures, and rendered with each shader; every render is
        compared against its own reference image.
        """
        device = torch.device("cuda:0")

        # Init mesh with vertex textures.
        # Two ico spheres at subdivision levels 3 and 4:
        # [(642 verts, 1280 faces), (2562 verts, 5120 faces)].
        sphere_list = [ico_sphere(3, device), ico_sphere(4, device)]
        # Scale factors to resize the spheres.
        scales = [0.25, 1]
        # Horizontal offsets to prevent the spheres from overlapping.
        offsets = [1.2, -0.3]
        # Build the adjusted sphere meshes (zip keeps sphere/scale/offset
        # aligned without index bookkeeping).
        sphere_mesh_list = []
        for sphere, scale, offset in zip(sphere_list, scales, offsets):
            verts = sphere.verts_padded() * scale
            verts[0, :, 0] += offset
            sphere_mesh_list.append(Meshes(verts=verts, faces=sphere.faces_padded()))
        joined_sphere_mesh = join_mesh(sphere_mesh_list)
        joined_sphere_mesh.textures = Textures(
            verts_rgb=torch.ones_like(joined_sphere_mesh.verts_padded())
        )

        # Init rasterizer settings: frontal camera, hard rasterization.
        R, T = look_at_view_transform(2.7, 0.0, 0.0)
        cameras = OpenGLPerspectiveCameras(device=device, R=R, T=T)
        raster_settings = RasterizationSettings(
            image_size=512, blur_radius=0.0, faces_per_pixel=1
        )

        # Init shader settings
        materials = Materials(device=device)
        lights = PointLights(device=device)
        lights.location = torch.tensor([0.0, 0.0, +2.0], device=device)[None]
        blend_params = BlendParams(1e-4, 1e-4, (0, 0, 0))

        # Init renderer and compare each shader against its reference image.
        rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)
        shaders = {
            "phong": HardPhongShader,
            "gouraud": HardGouraudShader,
            "flat": HardFlatShader,
        }
        for (name, shader_init) in shaders.items():
            shader = shader_init(
                lights=lights,
                cameras=cameras,
                materials=materials,
                blend_params=blend_params,
            )
            renderer = MeshRenderer(rasterizer=rasterizer, shader=shader)
            image = renderer(joined_sphere_mesh)
            rgb = image[..., :3].squeeze().cpu()
            if DEBUG:
                file_name = "DEBUG_joined_spheres_%s.png" % name
                Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(
                    DATA_DIR / file_name
                )
            image_ref = load_rgb_image("test_joined_spheres_%s.png" % name, DATA_DIR)
            self.assertClose(rgb, image_ref, atol=0.05)