# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import os
import unittest
import warnings
from collections import Counter
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory

import torch
from common_testing import (
    TestCaseMixin,
    get_pytorch3d_dir,
    get_tests_dir,
    load_rgb_image,
)
from iopath.common.file_io import PathManager
from pytorch3d.io import IO, load_obj, load_objs_as_meshes, save_obj
from pytorch3d.io.mtl_io import (
    _bilinear_interpolation_grid_sample,
    _bilinear_interpolation_vectorized,
    _parse_mtl,
)
from pytorch3d.renderer import TexturesAtlas, TexturesUV, TexturesVertex
from pytorch3d.structures import Meshes, join_meshes_as_batch
from pytorch3d.utils import torus


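# DATA_DIR holds the small OBJ/MTL assets checked into tests/data; the larger
# cow and teapot meshes used below come from the bundled tutorial data.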
DATA_DIR = get_tests_dir() / "data"
TUTORIAL_DATA_DIR = get_pytorch3d_dir() / "docs/tutorials/data"


class TestMeshObjIO(TestCaseMixin, unittest.TestCase):
    def test_load_obj_simple(self):
        obj_file = "\n".join(
            [
                "# this is a comment",  # Comments should be ignored.
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v  0.4 0.5 0.6",  # some obj files have multiple spaces after v
                "f 1 2 3",
                "f 1 2 4 3 1",  # Polygons should be split into triangles
            ]
        )
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            verts, faces, aux = load_obj(Path(f.name))
            normals = aux.normals
            textures = aux.verts_uvs
            materials = aux.material_colors
            tex_maps = aux.texture_images

            expected_verts = torch.tensor(
                [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
                dtype=torch.float32,
            )
            expected_faces = torch.tensor(
                [
                    [0, 1, 2],  # First face
                    [0, 1, 3],  # Second face (polygon)
                    [0, 3, 2],  # Second face (polygon)
                    [0, 2, 0],  # Second face (polygon)
                ],
                dtype=torch.int64,
            )
            self.assertTrue(torch.all(verts == expected_verts))
            self.assertTrue(torch.all(faces.verts_idx == expected_faces))
            padded_vals = -(torch.ones_like(faces.verts_idx))
            self.assertTrue(torch.all(faces.normals_idx == padded_vals))
            self.assertTrue(torch.all(faces.textures_idx == padded_vals))
            self.assertTrue(
                torch.all(faces.materials_idx == -(torch.ones(len(expected_faces))))
            )
            self.assertTrue(normals is None)
            self.assertTrue(textures is None)
            self.assertTrue(materials is None)
            self.assertTrue(tex_maps is None)

    def test_load_obj_complex(self):
        obj_file = "\n".join(
            [
                "# this is a comment",  # Comments should be ignored.
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v 0.4 0.5 0.6",
                "vn 0.000000 0.000000 -1.000000",
                "vn -1.000000 -0.000000 -0.000000",
                "vn -0.000000 -0.000000 1.000000",  # Normals should not be ignored.
                "v 0.5 0.6 0.7",
                "vt 0.749279 0.501284 0.0",  # Some files add 0.0 - ignore this.
                "vt 0.999110 0.501077",
                "vt 0.999455 0.750380",
                "f 1 2 3",
                "f 1 2 4 3 5",  # Polygons should be split into triangles
                "f 2/1/2 3/1/2 4/2/2",  # Texture/normals are loaded correctly.
                "f -1 -2 1",  # Negative indexing counts from the end.
            ]
        )

        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            verts, faces, aux = load_obj(Path(f.name))
            normals = aux.normals
            textures = aux.verts_uvs
            materials = aux.material_colors
            tex_maps = aux.texture_images

            expected_verts = torch.tensor(
                [
                    [0.1, 0.2, 0.3],
                    [0.2, 0.3, 0.4],
                    [0.3, 0.4, 0.5],
                    [0.4, 0.5, 0.6],
                    [0.5, 0.6, 0.7],
                ],
                dtype=torch.float32,
            )
            expected_faces = torch.tensor(
                [
                    [0, 1, 2],  # First face
                    [0, 1, 3],  # Second face (polygon)
                    [0, 3, 2],  # Second face (polygon)
                    [0, 2, 4],  # Second face (polygon)
                    [1, 2, 3],  # Third face (normals / texture)
                    [4, 3, 0],  # Fourth face (negative indices)
                ],
                dtype=torch.int64,
            )
            expected_normals = torch.tensor(
                [
                    [0.000000, 0.000000, -1.000000],
                    [-1.000000, -0.000000, -0.000000],
                    [-0.000000, -0.000000, 1.000000],
                ],
                dtype=torch.float32,
            )
            expected_textures = torch.tensor(
                [[0.749279, 0.501284], [0.999110, 0.501077], [0.999455, 0.750380]],
                dtype=torch.float32,
            )
            expected_faces_normals_idx = -(
                torch.ones_like(expected_faces, dtype=torch.int64)
            )
            expected_faces_normals_idx[4, :] = torch.tensor(
                [1, 1, 1], dtype=torch.int64
            )
            expected_faces_textures_idx = -(
                torch.ones_like(expected_faces, dtype=torch.int64)
            )
            expected_faces_textures_idx[4, :] = torch.tensor(
                [0, 0, 1], dtype=torch.int64
            )

            self.assertTrue(torch.all(verts == expected_verts))
            self.assertTrue(torch.all(faces.verts_idx == expected_faces))
            self.assertClose(normals, expected_normals)
            self.assertClose(textures, expected_textures)
            self.assertClose(faces.normals_idx, expected_faces_normals_idx)
            self.assertClose(faces.textures_idx, expected_faces_textures_idx)
            self.assertTrue(materials is None)
            self.assertTrue(tex_maps is None)

    def test_load_obj_complex_pluggable(self):
        """
        This won't work on Windows due to the behavior of NamedTemporaryFile
        """
        obj_file = "\n".join(
            [
                "# this is a comment",  # Comments should be ignored.
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v 0.4 0.5 0.6",
                "vn 0.000000 0.000000 -1.000000",
                "vn -1.000000 -0.000000 -0.000000",
                "vn -0.000000 -0.000000 1.000000",  # Normals should not be ignored.
                "v 0.5 0.6 0.7",
                "vt 0.749279 0.501284 0.0",  # Some files add 0.0 - ignore this.
                "vt 0.999110 0.501077",
                "vt 0.999455 0.750380",
                "f 1 2 3",
                "f 1 2 4 3 5",  # Polygons should be split into triangles
                "f 2/1/2 3/1/2 4/2/2",  # Texture/normals are loaded correctly.
                "f -1 -2 1",  # Negative indexing counts from the end.
            ]
        )
        io = IO()
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()
            mesh = io.load_mesh(f.name)
            mesh_from_path = io.load_mesh(Path(f.name))

        with NamedTemporaryFile(mode="w", suffix=".ply") as f:
            f.write(obj_file)
            f.flush()
            with self.assertRaisesRegex(ValueError, "Invalid file header."):
                io.load_mesh(f.name)

        expected_verts = torch.tensor(
            [
                [0.1, 0.2, 0.3],
                [0.2, 0.3, 0.4],
                [0.3, 0.4, 0.5],
                [0.4, 0.5, 0.6],
                [0.5, 0.6, 0.7],
            ],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor(
            [
                [0, 1, 2],  # First face
                [0, 1, 3],  # Second face (polygon)
                [0, 3, 2],  # Second face (polygon)
                [0, 2, 4],  # Second face (polygon)
                [1, 2, 3],  # Third face (normals / texture)
                [4, 3, 0],  # Fourth face (negative indices)
            ],
            dtype=torch.int64,
        )
        self.assertClose(mesh.verts_padded(), expected_verts[None])
        self.assertClose(mesh.faces_padded(), expected_faces[None])
        self.assertClose(mesh_from_path.verts_padded(), expected_verts[None])
        self.assertClose(mesh_from_path.faces_padded(), expected_faces[None])
        self.assertIsNone(mesh.textures)

    def test_load_obj_normals_only(self):
        obj_file = "\n".join(
            [
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v 0.4 0.5 0.6",
                "vn 0.000000 0.000000 -1.000000",
                "vn -1.000000 -0.000000 -0.000000",
                "f 2//1 3//1 4//2",
            ]
        )

        expected_faces_normals_idx = torch.tensor([[0, 0, 1]], dtype=torch.int64)
        expected_normals = torch.tensor(
            [[0.000000, 0.000000, -1.000000], [-1.000000, -0.000000, -0.000000]],
            dtype=torch.float32,
        )
        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )

        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            verts, faces, aux = load_obj(Path(f.name))
            normals = aux.normals
            textures = aux.verts_uvs
            materials = aux.material_colors
            tex_maps = aux.texture_images
            self.assertClose(faces.normals_idx, expected_faces_normals_idx)
            self.assertClose(normals, expected_normals)
            self.assertClose(verts, expected_verts)
            # Textures idx is padded with -1.
            self.assertClose(faces.textures_idx, torch.ones_like(faces.verts_idx) * -1)
            self.assertTrue(textures is None)
            self.assertTrue(materials is None)
            self.assertTrue(tex_maps is None)

    def test_load_obj_textures_only(self):
        obj_file = "\n".join(
            [
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v 0.4 0.5 0.6",
                "vt 0.999110 0.501077",
                "vt 0.999455 0.750380",
                "f 2/1 3/1 4/2",
            ]
        )

        expected_faces_textures_idx = torch.tensor([[0, 0, 1]], dtype=torch.int64)
        expected_textures = torch.tensor(
            [[0.999110, 0.501077], [0.999455, 0.750380]], dtype=torch.float32
        )
        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )

        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            verts, faces, aux = load_obj(Path(f.name))
            normals = aux.normals
            textures = aux.verts_uvs
            materials = aux.material_colors
            tex_maps = aux.texture_images

            self.assertClose(faces.textures_idx, expected_faces_textures_idx)
            self.assertClose(expected_textures, textures)
            self.assertClose(expected_verts, verts)
            self.assertTrue(
                torch.all(faces.normals_idx == -(torch.ones_like(faces.textures_idx)))
            )
            self.assertTrue(normals is None)
            self.assertTrue(materials is None)
            self.assertTrue(tex_maps is None)

    def test_load_obj_error_textures(self):
        obj_file = "\n".join(["vt 0.1"])
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertRaises(ValueError) as err:
                load_obj(Path(f.name))
            self.assertTrue("does not have 2 values" in str(err.exception))

    def test_load_obj_error_normals(self):
        obj_file = "\n".join(["vn 0.1"])
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertRaises(ValueError) as err:
                load_obj(Path(f.name))
            self.assertTrue("does not have 3 values" in str(err.exception))

    def test_load_obj_error_vertices(self):
        obj_file = "\n".join(["v 1"])
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertRaises(ValueError) as err:
                load_obj(Path(f.name))
            self.assertTrue("does not have 3 values" in str(err.exception))

    def test_load_obj_error_inconsistent_triplets(self):
        obj_file = "\n".join(["f 2//1 3/1 4/1/2"])
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertRaises(ValueError) as err:
                load_obj(Path(f.name))
            self.assertTrue("Vertex properties are inconsistent" in str(err.exception))

    def test_load_obj_error_too_many_vertex_properties(self):
        obj_file = "\n".join(["f 2/1/1/3"])
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertRaises(ValueError) as err:
                load_obj(Path(f.name))
            self.assertTrue(
                "Face vertices can only have 3 properties" in str(err.exception)
            )

    def test_load_obj_error_invalid_vertex_indices(self):
        obj_file = "\n".join(
            ["v 0.1 0.2 0.3", "v 0.1 0.2 0.3", "v 0.1 0.2 0.3", "f -2 5 1"]
        )
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertWarnsRegex(UserWarning, "Faces have invalid indices"):
                load_obj(Path(f.name))

    def test_load_obj_error_invalid_normal_indices(self):
        obj_file = "\n".join(
            [
                "v 0.1 0.2 0.3",
                "v 0.1 0.2 0.3",
                "v 0.1 0.2 0.3",
                "vn 0.1 0.2 0.3",
                "vn 0.1 0.2 0.3",
                "vn 0.1 0.2 0.3",
                "f -2/2 2/4 1/1",
            ]
        )
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertWarnsRegex(UserWarning, "Faces have invalid indices"):
                load_obj(Path(f.name))

    def test_load_obj_error_invalid_texture_indices(self):
        obj_file = "\n".join(
            [
                "v 0.1 0.2 0.3",
                "v 0.1 0.2 0.3",
                "v 0.1 0.2 0.3",
                "vt 0.1 0.2",
                "vt 0.1 0.2",
                "vt 0.1 0.2",
                "f -2//2 2//6 1//1",
            ]
        )
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertWarnsRegex(UserWarning, "Faces have invalid indices"):
                load_obj(Path(f.name))

    def test_save_obj_invalid_shapes(self):
        # Invalid vertices shape
        with self.assertRaises(ValueError) as error:
            verts = torch.FloatTensor([[0.1, 0.2, 0.3, 0.4]])  # (V, 4)
            faces = torch.LongTensor([[0, 1, 2]])
            with NamedTemporaryFile(mode="w", suffix=".obj") as f:
                save_obj(Path(f.name), verts, faces)
        expected_message = (
            "Argument 'verts' should either be empty or of shape (num_verts, 3)."
        )
        self.assertIn(expected_message, str(error.exception))

        # Invalid faces shape
        with self.assertRaises(ValueError) as error:
            verts = torch.FloatTensor([[0.1, 0.2, 0.3]])
            faces = torch.LongTensor([[0, 1, 2, 3]])  # (F, 4)
            with NamedTemporaryFile(mode="w", suffix=".obj") as f:
                save_obj(Path(f.name), verts, faces)
        expected_message = (
            "Argument 'faces' should either be empty or of shape (num_faces, 3)."
        )
        self.assertIn(expected_message, str(error.exception))

    def test_save_obj_invalid_indices(self):
        message_regex = "Faces have invalid indices"
        verts = torch.FloatTensor([[0.1, 0.2, 0.3]])
        faces = torch.LongTensor([[0, 1, 2]])
        with self.assertWarnsRegex(UserWarning, message_regex):
            with NamedTemporaryFile(mode="w", suffix=".obj") as f:
                save_obj(Path(f.name), verts, faces)

        faces = torch.LongTensor([[-1, 0, 1]])
        with self.assertWarnsRegex(UserWarning, message_regex):
            with NamedTemporaryFile(mode="w", suffix=".obj") as f:
                save_obj(Path(f.name), verts, faces)

    def _test_save_load(self, verts, faces):
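        """
        Round-trip helper: write (verts, faces) with save_obj, reload with
        load_obj, and check the tensors match (empty inputs are compared
        against (0, 3)-shaped tensors).
        """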
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            file_path = Path(f.name)
            save_obj(file_path, verts, faces)
            f.flush()

            expected_verts, expected_faces = verts, faces
            if not len(expected_verts):  # Always compare with a (V, 3) tensor
                expected_verts = torch.zeros(size=(0, 3), dtype=torch.float32)
            if not len(expected_faces):  # Always compare with an (F, 3) tensor
                expected_faces = torch.zeros(size=(0, 3), dtype=torch.int64)
            actual_verts, actual_faces, _ = load_obj(file_path)
            self.assertClose(expected_verts, actual_verts)
            self.assertClose(expected_faces, actual_faces.verts_idx)

    def test_empty_save_load_obj(self):
        # Vertices + empty faces
        verts = torch.FloatTensor([[0.1, 0.2, 0.3]])
        faces = torch.LongTensor([])
        self._test_save_load(verts, faces)

        faces = torch.zeros(size=(0, 3), dtype=torch.int64)
        self._test_save_load(verts, faces)

        # Faces + empty vertices
        message_regex = "Faces have invalid indices"
        verts = torch.FloatTensor([])
        faces = torch.LongTensor([[0, 1, 2]])
        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts, faces)

        verts = torch.zeros(size=(0, 3), dtype=torch.float32)
        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts, faces)

        # Empty vertices + empty faces
        message_regex = "Empty 'verts' and 'faces' arguments provided"
        verts0 = torch.FloatTensor([])
        faces0 = torch.LongTensor([])
        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts0, faces0)

        faces3 = torch.zeros(size=(0, 3), dtype=torch.int64)
        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts0, faces3)

        verts3 = torch.zeros(size=(0, 3), dtype=torch.float32)
        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts3, faces0)

        with self.assertWarnsRegex(UserWarning, message_regex):
            self._test_save_load(verts3, faces3)

    def test_save_obj(self):
        verts = torch.tensor(
            [[0.01, 0.2, 0.301], [0.2, 0.03, 0.408], [0.3, 0.4, 0.05], [0.6, 0.7, 0.8]],
            dtype=torch.float32,
        )
        faces = torch.tensor(
            [[0, 2, 1], [0, 1, 2], [3, 2, 1], [3, 1, 0]], dtype=torch.int64
        )
        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            save_obj(Path(f.name), verts, faces, decimal_places=2)

            expected_file = "\n".join(
                [
                    "v 0.01 0.20 0.30",
                    "v 0.20 0.03 0.41",
                    "v 0.30 0.40 0.05",
                    "v 0.60 0.70 0.80",
                    "f 1 3 2",
                    "f 1 2 3",
                    "f 4 3 2",
                    "f 4 2 1",
                ]
            )
            with open(Path(f.name), "r") as actual_file:
                self.assertEqual(actual_file.read(), expected_file)

    def test_load_mtl(self):
        obj_filename = "cow_mesh/cow.obj"
        filename = os.path.join(TUTORIAL_DATA_DIR, obj_filename)
        verts, faces, aux = load_obj(filename)
        materials = aux.material_colors
        tex_maps = aux.texture_images

        dtype = torch.float32
        expected_materials = {
            "material_1": {
                "ambient_color": torch.tensor([1.0, 1.0, 1.0], dtype=dtype),
                "diffuse_color": torch.tensor([1.0, 1.0, 1.0], dtype=dtype),
                "specular_color": torch.tensor([0.0, 0.0, 0.0], dtype=dtype),
                "shininess": torch.tensor([10.0], dtype=dtype),
            }
        }
        # Texture atlas is not created as `create_texture_atlas=True` was
        # not set in the load_obj args
        self.assertTrue(aux.texture_atlas is None)
        # Check that there is an image with material name material_1.
        self.assertTrue(tuple(tex_maps.keys()) == ("material_1",))
        self.assertTrue(torch.is_tensor(tuple(tex_maps.values())[0]))
        self.assertTrue(
            torch.all(faces.materials_idx == torch.zeros(len(faces.verts_idx)))
        )

        # Check all keys and values in dictionary are the same.
        for n1, n2 in zip(materials.keys(), expected_materials.keys()):
            self.assertTrue(n1 == n2)
            for k1, k2 in zip(materials[n1].keys(), expected_materials[n2].keys()):
                self.assertTrue(
                    torch.allclose(materials[n1][k1], expected_materials[n2][k2])
                )

    def test_load_mtl_with_spaces_in_resource_filename(self):
        """
        Check that the texture image for materials in mtl files
        is loaded correctly even if there is a space in the file name
        e.g. material 1.png
        """
        mtl_file = "\n".join(
            [
                "newmtl material_1",
                "map_Kd material 1.png",
                "Ka 1.000 1.000 1.000",  # white
                "Kd 1.000 1.000 1.000",  # white
                "Ks 0.000 0.000 0.000",  # black
                "Ns 10.0",
            ]
        )
        with NamedTemporaryFile(mode="w", suffix=".mtl") as f:
            f.write(mtl_file)
            f.flush()

            material_properties, texture_files = _parse_mtl(
                Path(f.name), path_manager=PathManager(), device="cpu"
            )

            dtype = torch.float32
            expected_materials = {
                "material_1": {
                    "ambient_color": torch.tensor([1.0, 1.0, 1.0], dtype=dtype),
                    "diffuse_color": torch.tensor([1.0, 1.0, 1.0], dtype=dtype),
                    "specular_color": torch.tensor([0.0, 0.0, 0.0], dtype=dtype),
                    "shininess": torch.tensor([10.0], dtype=dtype),
                }
            }
            # Check that there is a material with name material_1
            self.assertTrue(tuple(texture_files.keys()) == ("material_1",))
            # Check that there is an image with name material 1.png
            self.assertTrue(texture_files["material_1"] == "material 1.png")

            # Check all keys and values in dictionary are the same.
            for n1, n2 in zip(material_properties.keys(), expected_materials.keys()):
                self.assertTrue(n1 == n2)
                for k1, k2 in zip(
                    material_properties[n1].keys(), expected_materials[n2].keys()
                ):
                    self.assertTrue(
                        torch.allclose(
                            material_properties[n1][k1], expected_materials[n2][k2]
                        )
                    )

    def test_load_mtl_texture_atlas_compare_softras(self):
        # Load saved texture atlas created with SoftRas.
        device = torch.device("cuda:0")
        obj_filename = TUTORIAL_DATA_DIR / "cow_mesh/cow.obj"
        expected_atlas_fname = DATA_DIR / "cow_texture_atlas_softras.pt"

        # Note: the reference texture atlas generated with the SoftRas load_obj function
        # is too large to check into the repo. Download the file to run the test locally.
        if not os.path.exists(expected_atlas_fname):
            url = (
                "https://dl.fbaipublicfiles.com/pytorch3d/data/"
                "tests/cow_texture_atlas_softras.pt"
            )
            msg = (
                "cow_texture_atlas_softras.pt not found, download from %s, "
                "save it at the path %s, and rerun" % (url, expected_atlas_fname)
            )
            warnings.warn(msg)
            return

        expected_atlas = torch.load(expected_atlas_fname)
        _, _, aux = load_obj(
            obj_filename,
            load_textures=True,
            device=device,
            create_texture_atlas=True,
            texture_atlas_size=15,
            texture_wrap="repeat",
        )

        self.assertClose(expected_atlas, aux.texture_atlas, atol=5e-5)

    def test_load_mtl_noload(self):
        obj_filename = "cow_mesh/cow.obj"
        filename = os.path.join(TUTORIAL_DATA_DIR, obj_filename)
        verts, faces, aux = load_obj(filename, load_textures=False)

        self.assertTrue(aux.material_colors is None)
        self.assertTrue(aux.texture_images is None)

    def test_load_no_usemtl(self):
        obj_filename = "missing_usemtl/cow.obj"
        # obj_filename has no "usemtl material_1" line
        filename = os.path.join(DATA_DIR, obj_filename)
        # TexturesUV type
        mesh = IO().load_mesh(filename)
        self.assertIsNotNone(mesh.textures)

        verts, faces, aux = load_obj(filename)
        self.assertTrue("material_1" in aux.material_colors)
        self.assertTrue("material_1" in aux.texture_images)

    def test_load_mtl_fail(self):
        # Faces have a material
        obj_file = "\n".join(
            [
                "v 0.1 0.2 0.3",
                "v 0.2 0.3 0.4",
                "v 0.3 0.4 0.5",
                "v 0.4 0.5 0.6",
                "usemtl material_1",
                "f 1 2 3",
                "f 1 2 4",
            ]
        )

        with NamedTemporaryFile(mode="w", suffix=".obj") as f:
            f.write(obj_file)
            f.flush()

            with self.assertWarnsRegex(UserWarning, "No mtl file provided"):
                verts, faces, aux = load_obj(Path(f.name))

            expected_verts = torch.tensor(
                [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
                dtype=torch.float32,
            )
            expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
            self.assertTrue(torch.allclose(verts, expected_verts))
            self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))
            self.assertTrue(aux.material_colors is None)
            self.assertTrue(aux.texture_images is None)
            self.assertTrue(aux.normals is None)
            self.assertTrue(aux.verts_uvs is None)

    def test_load_obj_mtl_no_image(self):
        obj_filename = "obj_mtl_no_image/model.obj"
        filename = os.path.join(DATA_DIR, obj_filename)
        R = 8
        verts, faces, aux = load_obj(
            filename,
            load_textures=True,
            create_texture_atlas=True,
            texture_atlas_size=R,
            texture_wrap=None,
        )

        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
        self.assertTrue(torch.allclose(verts, expected_verts))
        self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))

        # Check that the material diffuse color has been assigned to all the
        # values in the texture atlas.
        expected_atlas = torch.tensor([0.5, 0.0, 0.0], dtype=torch.float32)
        expected_atlas = expected_atlas[None, None, None, :].expand(2, R, R, -1)
        self.assertTrue(torch.allclose(aux.texture_atlas, expected_atlas))
        self.assertEqual(len(aux.material_colors.keys()), 1)
        self.assertEqual(list(aux.material_colors.keys()), ["material_1"])

    def test_load_obj_missing_texture(self):
        obj_filename = "missing_files_obj/model.obj"
        filename = os.path.join(DATA_DIR, obj_filename)
        with self.assertWarnsRegex(UserWarning, "Texture file does not exist"):
            verts, faces, aux = load_obj(filename)

        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
        self.assertTrue(torch.allclose(verts, expected_verts))
        self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))

    def test_load_obj_missing_texture_noload(self):
        obj_filename = "missing_files_obj/model.obj"
        filename = os.path.join(DATA_DIR, obj_filename)
        verts, faces, aux = load_obj(filename, load_textures=False)

        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
        self.assertTrue(torch.allclose(verts, expected_verts))
        self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))
        self.assertTrue(aux.material_colors is None)
        self.assertTrue(aux.texture_images is None)

    def test_load_obj_missing_mtl(self):
        obj_filename = "missing_files_obj/model2.obj"
        filename = os.path.join(DATA_DIR, obj_filename)
        with self.assertWarnsRegex(UserWarning, "Mtl file does not exist"):
            verts, faces, aux = load_obj(filename)

        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
        self.assertTrue(torch.allclose(verts, expected_verts))
        self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))

    def test_load_obj_missing_mtl_noload(self):
        obj_filename = "missing_files_obj/model2.obj"
        filename = os.path.join(DATA_DIR, obj_filename)
        verts, faces, aux = load_obj(filename, load_textures=False)

        expected_verts = torch.tensor(
            [[0.1, 0.2, 0.3], [0.2, 0.3, 0.4], [0.3, 0.4, 0.5], [0.4, 0.5, 0.6]],
            dtype=torch.float32,
        )
        expected_faces = torch.tensor([[0, 1, 2], [0, 1, 3]], dtype=torch.int64)
        self.assertTrue(torch.allclose(verts, expected_verts))
        self.assertTrue(torch.allclose(faces.verts_idx, expected_faces))
        self.assertTrue(aux.material_colors is None)
        self.assertTrue(aux.texture_images is None)

    def test_join_meshes_as_batch(self):
        """
        Test that join_meshes_as_batch and load_objs_as_meshes are consistent
        with single meshes.
        """

        def check_triple(mesh, mesh3):
            """
            Verify that mesh3 is three copies of mesh.
            """

            def check_item(x, y):
                self.assertEqual(x is None, y is None)
                if x is not None:
                    self.assertClose(torch.cat([x, x, x]), y)

            check_item(mesh.verts_padded(), mesh3.verts_padded())
            check_item(mesh.faces_padded(), mesh3.faces_padded())

            if mesh.textures is not None:
                if isinstance(mesh.textures, TexturesUV):
                    check_item(
                        mesh.textures.faces_uvs_padded(),
                        mesh3.textures.faces_uvs_padded(),
                    )
                    check_item(
                        mesh.textures.verts_uvs_padded(),
                        mesh3.textures.verts_uvs_padded(),
                    )
                    check_item(
                        mesh.textures.maps_padded(), mesh3.textures.maps_padded()
                    )
                elif isinstance(mesh.textures, TexturesVertex):
                    check_item(
                        mesh.textures.verts_features_padded(),
                        mesh3.textures.verts_features_padded(),
                    )
                elif isinstance(mesh.textures, TexturesAtlas):
                    check_item(
                        mesh.textures.atlas_padded(), mesh3.textures.atlas_padded()
                    )

        obj_filename = TUTORIAL_DATA_DIR / "cow_mesh/cow.obj"

        mesh = load_objs_as_meshes([obj_filename])
        mesh3 = load_objs_as_meshes([obj_filename, obj_filename, obj_filename])
        check_triple(mesh, mesh3)
        self.assertTupleEqual(mesh.textures.maps_padded().shape, (1, 1024, 1024, 3))

        # Try mismatched texture map sizes, which needs a call to interpolate()
        mesh2048 = mesh.clone()
        maps = mesh.textures.maps_padded()
        mesh2048.textures._maps_padded = torch.cat([maps, maps], dim=1)
        join_meshes_as_batch([mesh.to("cuda:0"), mesh2048.to("cuda:0")])

        mesh_notex = load_objs_as_meshes([obj_filename], load_textures=False)
        mesh3_notex = load_objs_as_meshes(
            [obj_filename, obj_filename, obj_filename], load_textures=False
        )
        check_triple(mesh_notex, mesh3_notex)
        self.assertIsNone(mesh_notex.textures)

        # meshes with vertex texture, join into a batch.
        verts = torch.randn((4, 3), dtype=torch.float32)
        faces = torch.tensor([[2, 1, 0], [3, 1, 0]], dtype=torch.int64)
        vert_tex = torch.ones_like(verts)
        rgb_tex = TexturesVertex(verts_features=[vert_tex])
        mesh_rgb = Meshes(verts=[verts], faces=[faces], textures=rgb_tex)
        mesh_rgb3 = join_meshes_as_batch([mesh_rgb, mesh_rgb, mesh_rgb])
        check_triple(mesh_rgb, mesh_rgb3)
        nums_rgb = mesh_rgb.textures._num_verts_per_mesh
        nums_rgb3 = mesh_rgb3.textures._num_verts_per_mesh
        self.assertEqual(type(nums_rgb), list)
        self.assertEqual(type(nums_rgb3), list)
        self.assertListEqual(nums_rgb * 3, nums_rgb3)

        # meshes with texture atlas, join into a batch.
        device = "cuda:0"
        atlas = torch.rand((2, 4, 4, 3), dtype=torch.float32, device=device)
        atlas_tex = TexturesAtlas(atlas=[atlas])
        mesh_atlas = Meshes(verts=[verts], faces=[faces], textures=atlas_tex)
        mesh_atlas3 = join_meshes_as_batch([mesh_atlas, mesh_atlas, mesh_atlas])
        check_triple(mesh_atlas, mesh_atlas3)

        # Test loading multiple meshes with textures into a batch.
        teapot_obj = TUTORIAL_DATA_DIR / "teapot.obj"
        mesh_teapot = load_objs_as_meshes([teapot_obj])
        teapot_verts, teapot_faces = mesh_teapot.get_mesh_verts_faces(0)
        mix_mesh = load_objs_as_meshes([obj_filename, teapot_obj], load_textures=False)
        self.assertEqual(len(mix_mesh), 2)
        self.assertClose(mix_mesh.verts_list()[0], mesh.verts_list()[0])
        self.assertClose(mix_mesh.faces_list()[0], mesh.faces_list()[0])
        self.assertClose(mix_mesh.verts_list()[1], teapot_verts)
        self.assertClose(mix_mesh.faces_list()[1], teapot_faces)

        cow3_tea = join_meshes_as_batch([mesh3, mesh_teapot], include_textures=False)
        self.assertEqual(len(cow3_tea), 4)
        check_triple(mesh_notex, cow3_tea[:3])
        self.assertClose(cow3_tea.verts_list()[3], mesh_teapot.verts_list()[0])
        self.assertClose(cow3_tea.faces_list()[3], mesh_teapot.faces_list()[0])

        # Check error raised if all meshes in the batch don't have the same texture type
        with self.assertRaisesRegex(ValueError, "same type of texture"):
            join_meshes_as_batch([mesh_atlas, mesh_rgb, mesh_atlas])

    def test_save_obj_with_texture(self):
        verts = torch.tensor(
            [[0.01, 0.2, 0.301], [0.2, 0.03, 0.408], [0.3, 0.4, 0.05], [0.6, 0.7, 0.8]],
            dtype=torch.float32,
        )
        faces = torch.tensor(
            [[0, 2, 1], [0, 1, 2], [3, 2, 1], [3, 1, 0]], dtype=torch.int64
        )
        verts_uvs = torch.tensor(
            [[0.02, 0.5], [0.3, 0.03], [0.32, 0.12], [0.36, 0.17]],
            dtype=torch.float32,
        )
        faces_uvs = faces
        texture_map = torch.randint(size=(2, 2, 3), high=255) / 255.0

        with TemporaryDirectory() as temp_dir:
            obj_file = os.path.join(temp_dir, "mesh.obj")
            save_obj(
                obj_file,
                verts,
                faces,
                decimal_places=2,
                verts_uvs=verts_uvs,
                faces_uvs=faces_uvs,
                texture_map=texture_map,
            )

            expected_obj_file = "\n".join(
                [
                    "",
                    "mtllib mesh.mtl",
                    "usemtl mesh",
                    "",
                    "v 0.01 0.20 0.30",
                    "v 0.20 0.03 0.41",
                    "v 0.30 0.40 0.05",
                    "v 0.60 0.70 0.80",
                    "vt 0.02 0.50",
                    "vt 0.30 0.03",
                    "vt 0.32 0.12",
                    "vt 0.36 0.17",
                    "f 1/1 3/3 2/2",
                    "f 1/1 2/2 3/3",
                    "f 4/4 3/3 2/2",
                    "f 4/4 2/2 1/1",
                ]
            )
            expected_mtl_file = "\n".join(["newmtl mesh", "map_Kd mesh.png", ""])

            # Check there are only 3 files in the temp dir
            tempfiles = ["mesh.obj", "mesh.png", "mesh.mtl"]
            tempfiles_dir = os.listdir(temp_dir)
            self.assertEqual(Counter(tempfiles), Counter(tempfiles_dir))

            # Check the obj file is saved correctly
            with open(obj_file, "r") as actual_file:
                self.assertEqual(actual_file.read(), expected_obj_file)

            # Check the mtl file is saved correctly
            mtl_file_name = os.path.join(temp_dir, "mesh.mtl")
            with open(mtl_file_name, "r") as mtl_file:
                self.assertEqual(mtl_file.read(), expected_mtl_file)

            # Check the texture image file is saved correctly
            texture_image = load_rgb_image("mesh.png", temp_dir)
            self.assertClose(texture_image, texture_map)

    def test_save_obj_with_texture_errors(self):
        verts = torch.tensor(
            [[0.01, 0.2, 0.301], [0.2, 0.03, 0.408], [0.3, 0.4, 0.05], [0.6, 0.7, 0.8]],
            dtype=torch.float32,
        )
        faces = torch.tensor(
            [[0, 2, 1], [0, 1, 2], [3, 2, 1], [3, 1, 0]], dtype=torch.int64
        )
        verts_uvs = torch.tensor(
            [[0.02, 0.5], [0.3, 0.03], [0.32, 0.12], [0.36, 0.17]],
            dtype=torch.float32,
        )
        faces_uvs = faces
        texture_map = torch.randint(size=(2, 2, 3), high=255)

        expected_obj_file = "\n".join(
            [
                "v 0.01 0.20 0.30",
                "v 0.20 0.03 0.41",
                "v 0.30 0.40 0.05",
                "v 0.60 0.70 0.80",
                "f 1 3 2",
                "f 1 2 3",
                "f 4 3 2",
                "f 4 2 1",
            ]
        )
        with TemporaryDirectory() as temp_dir:
            obj_file = os.path.join(temp_dir, "mesh.obj")

            # If only one of verts_uvs/faces_uvs/texture_map is provided
            # then textures are not saved
            for arg in [
                {"verts_uvs": verts_uvs},
                {"faces_uvs": faces_uvs},
                {"texture_map": texture_map},
            ]:
                save_obj(
                    obj_file,
                    verts,
                    faces,
                    decimal_places=2,
                    **arg,
                )

                # Check there is only 1 file in the temp dir
                tempfiles = ["mesh.obj"]
                tempfiles_dir = os.listdir(temp_dir)
                self.assertEqual(tempfiles, tempfiles_dir)

                # Check the obj file is saved correctly
                with open(obj_file, "r") as actual_file:
                    self.assertEqual(actual_file.read(), expected_obj_file)

        obj_file = StringIO()
        with self.assertRaises(ValueError):
            save_obj(
                obj_file,
                verts,
                faces,
                decimal_places=2,
                verts_uvs=verts_uvs,
                faces_uvs=faces_uvs[..., 2],  # Incorrect shape
                texture_map=texture_map,
            )

        with self.assertRaises(ValueError):
            save_obj(
                obj_file,
                verts,
                faces,
                decimal_places=2,
                verts_uvs=verts_uvs[..., 0],  # Incorrect shape
                faces_uvs=faces_uvs,
                texture_map=texture_map,
            )

        with self.assertRaises(ValueError):
            save_obj(
                obj_file,
                verts,
                faces,
                decimal_places=2,
                verts_uvs=verts_uvs,
                faces_uvs=faces_uvs,
                texture_map=texture_map[..., 1],  # Incorrect shape
            )

    @staticmethod
    def _bm_save_obj(verts: torch.Tensor, faces: torch.Tensor, decimal_places: int):
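        # Benchmark factory: return a callable that writes the mesh to an
        # in-memory StringIO buffer.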
        return lambda: save_obj(StringIO(), verts, faces, decimal_places)

    @staticmethod
    def _bm_load_obj(verts: torch.Tensor, faces: torch.Tensor, decimal_places: int):
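        # Benchmark factory: serialize the mesh once, then return a callable
        # that re-parses a fresh StringIO copy of that string.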
        f = StringIO()
        save_obj(f, verts, faces, decimal_places)
        s = f.getvalue()
        # Recreate stream so it's unaffected by how it was created.
        return lambda: load_obj(StringIO(s))

    @staticmethod
    def bm_save_simple_obj_with_init(V: int, F: int):
        verts = torch.tensor(V * [[0.11, 0.22, 0.33]]).view(-1, 3)
        faces = torch.tensor(F * [[1, 2, 3]]).view(-1, 3)
        return TestMeshObjIO._bm_save_obj(verts, faces, decimal_places=2)

    @staticmethod
    def bm_load_simple_obj_with_init(V: int, F: int):
        verts = torch.tensor(V * [[0.1, 0.2, 0.3]]).view(-1, 3)
        faces = torch.tensor(F * [[1, 2, 3]]).view(-1, 3)
        return TestMeshObjIO._bm_load_obj(verts, faces, decimal_places=2)

    @staticmethod
    def bm_save_complex_obj(N: int):
        meshes = torus(r=0.25, R=1.0, sides=N, rings=2 * N)
        [verts], [faces] = meshes.verts_list(), meshes.faces_list()
        return TestMeshObjIO._bm_save_obj(verts, faces, decimal_places=5)

    @staticmethod
    def bm_load_complex_obj(N: int):
        meshes = torus(r=0.25, R=1.0, sides=N, rings=2 * N)
        [verts], [faces] = meshes.verts_list(), meshes.faces_list()
        return TestMeshObjIO._bm_load_obj(verts, faces, decimal_places=5)

    @staticmethod
    def bm_load_texture_atlas(R: int):
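        # Benchmark loading the cow mesh with an R x R per-face texture atlas
        # on the GPU.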
        device = torch.device("cuda:0")
        torch.cuda.set_device(device)
        data_dir = "/data/users/nikhilar/fbsource/fbcode/vision/fair/pytorch3d/docs/"
        obj_filename = os.path.join(data_dir, "tutorials/data/cow_mesh/cow.obj")
        torch.cuda.synchronize()

        def load():
            load_obj(
                obj_filename,
                load_textures=True,
                device=device,
                create_texture_atlas=True,
                texture_atlas_size=R,
            )
            torch.cuda.synchronize()

        return load

    @staticmethod
    def bm_bilinear_sampling_vectorized(S: int, F: int, R: int):
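        # Benchmark the vectorized bilinear interpolation helper from mtl_io
        # (compare with the grid_sample variant below).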
        device = torch.device("cuda:0")
        torch.cuda.set_device(device)
        image = torch.rand((S, S, 3))
        grid = torch.rand((F, R, R, 2))
        torch.cuda.synchronize()

        def load():
            _bilinear_interpolation_vectorized(image, grid)
            torch.cuda.synchronize()

        return load

    @staticmethod
    def bm_bilinear_sampling_grid_sample(S: int, F: int, R: int):
        device = torch.device("cuda:0")
        torch.cuda.set_device(device)
        image = torch.rand((S, S, 3))
        grid = torch.rand((F, R, R, 2))
        torch.cuda.synchronize()

        def load():
            _bilinear_interpolation_grid_sample(image, grid)
            torch.cuda.synchronize()

        return load