{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "_Ip8kp4TfBLZ"
   },
   "outputs": [],
   "source": [
    "# Copyright (c) Meta Platforms, Inc. and affiliates. All rights reserved."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "kuXHJv44fBLe"
   },
   "source": [
    "# Render a textured mesh\n",
    "\n",
    "This tutorial shows how to:\n",
    "- load a mesh and textures from an `.obj` file. \n",
    "- set up a renderer \n",
    "- render the mesh \n",
    "- vary the rendering settings such as lighting and camera position\n",
    "- use the batching features of the pytorch3d API to render the mesh from different viewpoints"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "Bnj3THhzfBLf"
   },
   "source": [
    "## 0. Install and Import modules"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "okLalbR_g7NS"
   },
   "source": [
    "Ensure `torch` and `torchvision` are installed. If `pytorch3d` is not installed, install it using the following cell:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 717
    },
    "colab_type": "code",
    "id": "musUWTglgxSB",
    "outputId": "16d1a1b2-3f7f-43ed-ca28-a4d236cc0572"
   },
   "outputs": [],
   "source": [
    "import os\n",
    "import sys\n",
    "import torch\n",
    "import subprocess\n",
    "need_pytorch3d=False\n",
    "try:\n",
    "    import pytorch3d\n",
    "except ModuleNotFoundError:\n",
    "    need_pytorch3d=True\n",
    "if need_pytorch3d:\n",
    "    pyt_version_str=torch.__version__.split(\"+\")[0].replace(\".\", \"\")\n",
    "    version_str=\"\".join([\n",
    "        f\"py3{sys.version_info.minor}_cu\",\n",
    "        torch.version.cuda.replace(\".\",\"\"),\n",
    "        f\"_pyt{pyt_version_str}\"\n",
    "    ])\n",
    "    !pip install fvcore iopath\n",
    "    if sys.platform.startswith(\"linux\"):\n",
    "        print(\"Trying to install wheel for PyTorch3D\")\n",
    "        !pip install --no-index --no-cache-dir pytorch3d -f https://dl.fbaipublicfiles.com/pytorch3d/packaging/wheels/{version_str}/download.html\n",
    "        pip_list = !pip freeze\n",
    "        need_pytorch3d = not any(i.startswith(\"pytorch3d==\") for  i in pip_list)\n",
    "    if need_pytorch3d:\n",
    "        print(f\"failed to find/install wheel for {version_str}\")\n",
    "if need_pytorch3d:\n",
    "    print(\"Installing PyTorch3D from source\")\n",
    "    !pip install ninja\n",
    "    !pip install 'git+https://github.com/facebookresearch/pytorch3d.git@stable'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "nX99zdoffBLg"
   },
   "outputs": [],
   "source": [
    "import os\n",
    "import torch\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# Util function for loading meshes\n",
    "from pytorch3d.io import load_objs_as_meshes, load_obj\n",
    "\n",
    "# Data structures and functions for rendering\n",
    "from pytorch3d.structures import Meshes\n",
    "from pytorch3d.vis.plotly_vis import AxisArgs, plot_batch_individually, plot_scene\n",
    "from pytorch3d.vis.texture_vis import texturesuv_image_matplotlib\n",
    "from pytorch3d.renderer import (\n",
    "    look_at_view_transform,\n",
    "    FoVPerspectiveCameras, \n",
    "    PointLights, \n",
    "    DirectionalLights, \n",
    "    Materials, \n",
    "    RasterizationSettings, \n",
    "    MeshRenderer, \n",
    "    MeshRasterizer,  \n",
    "    SoftPhongShader,\n",
    "    TexturesUV,\n",
    "    TexturesVertex\n",
    ")\n",
    "\n",
    "# add path for demo utils functions \n",
    "import sys\n",
    "import os\n",
    "sys.path.append(os.path.abspath(''))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "Lxmehq6Zhrzv"
   },
   "source": [
    "If using **Google Colab**, fetch the utils file for plotting image grids:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 204
    },
    "colab_type": "code",
    "id": "HZozr3Pmho-5",
    "outputId": "be5eb60d-5f65-4db1-cca0-44ee68c8f5fd"
   },
   "outputs": [],
   "source": [
    "!wget https://raw.githubusercontent.com/facebookresearch/pytorch3d/main/docs/tutorials/utils/plot_image_grid.py\n",
    "from plot_image_grid import image_grid"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "g4B62MzYiJUM"
   },
   "source": [
    "OR if running **locally** uncomment and run the following cell:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "paJ4Im8ahl7O"
   },
   "outputs": [],
   "source": [
    "# from utils import image_grid"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "5jGq772XfBLk"
   },
   "source": [
    "### 1. Load a mesh and texture file\n",
    "\n",
    "Load an `.obj` file and its associated `.mtl` file and create a **Textures** and **Meshes** object. \n",
    "\n",
    "**Meshes** is a unique datastructure provided in PyTorch3D for working with batches of meshes of different sizes. \n",
    "\n",
    "**TexturesUV** is an auxiliary datastructure for storing vertex uv and texture maps for meshes. \n",
    "\n",
    "**Meshes** has several class methods which are used throughout the rendering pipeline."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "a8eU4zo5jd_H"
   },
   "source": [
    "If running this notebook using **Google Colab**, run the following cell to fetch the mesh obj and texture files and save it at the path `data/cow_mesh`:\n",
    "If running locally, the data is already available at the correct path. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 578
    },
    "colab_type": "code",
    "id": "tTm0cVuOjb1W",
    "outputId": "6cd7e2ec-65e1-4dcc-99e8-c347bc504f0a"
   },
   "outputs": [],
   "source": [
    "!mkdir -p data/cow_mesh\n",
    "!wget -P data/cow_mesh https://dl.fbaipublicfiles.com/pytorch3d/data/cow_mesh/cow.obj\n",
    "!wget -P data/cow_mesh https://dl.fbaipublicfiles.com/pytorch3d/data/cow_mesh/cow.mtl\n",
    "!wget -P data/cow_mesh https://dl.fbaipublicfiles.com/pytorch3d/data/cow_mesh/cow_texture.png"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "gi5Kd0GafBLl"
   },
   "outputs": [],
   "source": [
    "# Setup\n",
    "if torch.cuda.is_available():\n",
    "    device = torch.device(\"cuda:0\")\n",
    "    torch.cuda.set_device(device)\n",
    "else:\n",
    "    device = torch.device(\"cpu\")\n",
    "\n",
    "# Set paths\n",
    "DATA_DIR = \"./data\"\n",
    "obj_filename = os.path.join(DATA_DIR, \"cow_mesh/cow.obj\")\n",
    "\n",
    "# Load obj file\n",
    "mesh = load_objs_as_meshes([obj_filename], device=device)"
   ]
  },
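  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check, we can inspect the loaded mesh with a few of the **Meshes** accessor methods mentioned above. This is a minimal sketch; the exact vertex and face counts depend on the downloaded cow mesh."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Inspect the loaded mesh using some of the Meshes accessor methods.\n",
    "print(\"verts shape:\", mesh.verts_packed().shape)   # (V, 3) vertex positions\n",
    "print(\"faces shape:\", mesh.faces_packed().shape)   # (F, 3) vertex indices per face\n",
    "print(\"texture type:\", type(mesh.textures).__name__)  # expected: TexturesUV"
   ]
  },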
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "5APAQs6-fBLp"
   },
   "source": [
    "#### Let's visualize the texture map"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 428
    },
    "colab_type": "code",
    "id": "YipUhrIHfBLq",
    "outputId": "48987b1d-5cc1-4c2a-cb3c-713d64f6a38d"
   },
   "outputs": [],
   "source": [
    "plt.figure(figsize=(7,7))\n",
    "texture_image=mesh.textures.maps_padded()\n",
    "plt.imshow(texture_image.squeeze().cpu().numpy())\n",
    "plt.axis(\"off\");"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "PyTorch3D has a built-in way to view the texture map with matplotlib along with the points on the map corresponding to vertices. There is also a method, texturesuv_image_PIL, to get a similar image which can be saved to a file."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.figure(figsize=(7,7))\n",
    "texturesuv_image_matplotlib(mesh.textures, subsample=None)\n",
    "plt.axis(\"off\");"
   ]
  },
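  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a minimal sketch of the `texturesuv_image_PIL` function mentioned above, the following cell saves a similar visualization to disk (the output filename `texture_uv.png` is just an example):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pytorch3d.vis.texture_vis import texturesuv_image_PIL\n",
    "\n",
    "# Produce a PIL image of the texture map with the UV points overlaid\n",
    "# and save it to disk. The filename here is arbitrary.\n",
    "pil_texture_image = texturesuv_image_PIL(mesh.textures)\n",
    "pil_texture_image.save(\"texture_uv.png\")"
   ]
  },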
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "GcnG6XJ6fBLu"
   },
   "source": [
    "## 2. Create a renderer\n",
    "\n",
    "A renderer in PyTorch3D is composed of a **rasterizer** and a **shader** which each have a number of subcomponents such as a **camera** (orthographic/perspective). Here we initialize some of these components and use default values for the rest.\n",
    "\n",
    "In this example we will first create a **renderer** which uses a **perspective camera**, a **point light** and applies **Phong shading**. Then we learn how to vary different components using the modular API.  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "dX466mWnfBLv"
   },
   "outputs": [],
   "source": [
    "# Initialize a camera.\n",
    "# With world coordinates +Y up, +X left and +Z in, the front of the cow is facing the -Z direction. \n",
    "# So we move the camera by 180 in the azimuth direction so it is facing the front of the cow. \n",
    "R, T = look_at_view_transform(2.7, 0, 180) \n",
    "cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n",
    "\n",
    "# Define the settings for rasterization and shading. Here we set the output image to be of size\n",
    "# 512x512. As we are rendering images for visualization purposes only we will set faces_per_pixel=1\n",
    "# and blur_radius=0.0. We also set bin_size and max_faces_per_bin to None which ensure that \n",
    "# the faster coarse-to-fine rasterization method is used. Refer to rasterize_meshes.py for \n",
    "# explanations of these parameters. Refer to docs/notes/renderer.md for an explanation of \n",
    "# the difference between naive and coarse-to-fine rasterization. \n",
    "raster_settings = RasterizationSettings(\n",
    "    image_size=512, \n",
    "    blur_radius=0.0, \n",
    "    faces_per_pixel=1, \n",
    ")\n",
    "\n",
    "# Place a point light in front of the object. As mentioned above, the front of the cow is facing the \n",
    "# -z direction. \n",
    "lights = PointLights(device=device, location=[[0.0, 0.0, -3.0]])\n",
    "\n",
    "# Create a Phong renderer by composing a rasterizer and a shader. The textured Phong shader will \n",
    "# interpolate the texture uv coordinates for each vertex, sample from a texture image and \n",
    "# apply the Phong lighting model\n",
    "renderer = MeshRenderer(\n",
    "    rasterizer=MeshRasterizer(\n",
    "        cameras=cameras, \n",
    "        raster_settings=raster_settings\n",
    "    ),\n",
    "    shader=SoftPhongShader(\n",
    "        device=device, \n",
    "        cameras=cameras,\n",
    "        lights=lights\n",
    "    )\n",
    ")"
   ]
  },
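  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Because the renderer is modular, each component can be swapped out independently. The cell below is a minimal sketch that builds a second renderer with an orthographic camera in place of the perspective one; the names `ortho_cameras` and `ortho_renderer` are just illustrative."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pytorch3d.renderer import FoVOrthographicCameras\n",
    "\n",
    "# Swap the perspective camera for an orthographic one. Any of the other\n",
    "# components (lights, shader, rasterization settings) can be replaced\n",
    "# in the same way.\n",
    "ortho_cameras = FoVOrthographicCameras(device=device, R=R, T=T)\n",
    "ortho_renderer = MeshRenderer(\n",
    "    rasterizer=MeshRasterizer(cameras=ortho_cameras, raster_settings=raster_settings),\n",
    "    shader=SoftPhongShader(device=device, cameras=ortho_cameras, lights=lights)\n",
    ")"
   ]
  },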
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "KyOY5qXvfBLz"
   },
   "source": [
    "## 3. Render the mesh"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "8VkRA4qJfBL0"
   },
   "source": [
    "The light is in front of the object so it is bright and the image has specular highlights."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 592
    },
    "colab_type": "code",
    "id": "gBLZH8iUfBL1",
    "outputId": "cc3cd3f0-189e-4497-ce47-e64b4da542e8"
   },
   "outputs": [],
   "source": [
    "images = renderer(mesh)\n",
    "plt.figure(figsize=(10, 10))\n",
    "plt.imshow(images[0, ..., :3].cpu().numpy())\n",
    "plt.axis(\"off\");"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "k161XF3sfBL5"
   },
   "source": [
    "## 4. Move the light behind the object and re-render\n",
    "\n",
    "We can pass arbitrary keyword arguments to the `rasterizer`/`shader` via the call to the `renderer` so the renderer does not need to be reinitialized if any of the settings change/\n",
    "\n",
    "In this case, we can simply update the location of the lights and pass them into the call to the renderer. \n",
    "\n",
    "The image is now dark as there is only ambient lighting, and there are no specular highlights."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "BdWkkeibfBL6"
   },
   "outputs": [],
   "source": [
    "# Now move the light so it is on the +Z axis which will be behind the cow. \n",
    "lights.location = torch.tensor([0.0, 0.0, +1.0], device=device)[None]\n",
    "images = renderer(mesh, lights=lights)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 592
    },
    "colab_type": "code",
    "id": "UmV3j1YffBL9",
    "outputId": "2e8edca0-5bd8-4a2f-a160-83c4b0520123"
   },
   "outputs": [],
   "source": [
    "plt.figure(figsize=(10, 10))\n",
    "plt.imshow(images[0, ..., :3].cpu().numpy())\n",
    "plt.axis(\"off\");"
   ]
  },
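  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The residual brightness comes from the ambient term of the lighting model. As a hedged sketch, we could brighten the render by constructing lights with a larger ambient component (the color values below are arbitrary):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Lights with a stronger ambient component; the color values are arbitrary.\n",
    "brighter_lights = PointLights(\n",
    "    device=device,\n",
    "    ambient_color=((0.7, 0.7, 0.7),),\n",
    "    location=[[0.0, 0.0, +1.0]],\n",
    ")\n",
    "bright_images = renderer(mesh, lights=brighter_lights)\n",
    "plt.figure(figsize=(10, 10))\n",
    "plt.imshow(bright_images[0, ..., :3].cpu().numpy())\n",
    "plt.axis(\"off\");"
   ]
  },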
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "t93aVotMfBMB"
   },
   "source": [
    "## 5. Rotate the object, modify the material properties or light properties\n",
    "\n",
    "We can also change many other settings in the rendering pipeline. Here we:\n",
    "\n",
    "- change the **viewing angle** of the camera\n",
    "- change the **position** of the point light\n",
    "- change the **material reflectance** properties of the mesh"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "4mYXYziefBMB"
   },
   "outputs": [],
   "source": [
    "# Rotate the object by increasing the elevation and azimuth angles\n",
    "R, T = look_at_view_transform(dist=2.7, elev=10, azim=-150)\n",
    "cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n",
    "\n",
    "# Move the light location so the light is shining on the cow's face.  \n",
    "lights.location = torch.tensor([[2.0, 2.0, -2.0]], device=device)\n",
    "\n",
    "# Change specular color to green and change material shininess \n",
    "materials = Materials(\n",
    "    device=device,\n",
    "    specular_color=[[0.0, 1.0, 0.0]],\n",
    "    shininess=10.0\n",
    ")\n",
    "\n",
    "# Re render the mesh, passing in keyword arguments for the modified components.\n",
    "images = renderer(mesh, lights=lights, materials=materials, cameras=cameras)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 592
    },
    "colab_type": "code",
    "id": "rHIxIfh5fBME",
    "outputId": "1ca2d337-2983-478f-b3c9-d64b84ba1a31"
   },
   "outputs": [],
   "source": [
    "plt.figure(figsize=(10, 10))\n",
    "plt.imshow(images[0, ..., :3].cpu().numpy())\n",
    "plt.axis(\"off\");"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "17c4xmtyfBMH"
   },
   "source": [
    "## 6. Batched Rendering\n",
    "\n",
    "One of the core design choices of the PyTorch3D API is to support **batched inputs for all components**. \n",
    "The renderer and associated components can take batched inputs and **render a batch of output images in one forward pass**. We will now use this feature to render the mesh from many different viewpoints.\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "CDQKebNNfBMI"
   },
   "outputs": [],
   "source": [
    "# Set batch size - this is the number of different viewpoints from which we want to render the mesh.\n",
    "batch_size = 20\n",
    "\n",
    "# Create a batch of meshes by repeating the cow mesh and associated textures. \n",
    "# Meshes has a useful `extend` method which allows us do this very easily. \n",
    "# This also extends the textures. \n",
    "meshes = mesh.extend(batch_size)\n",
    "\n",
    "# Get a batch of viewing angles. \n",
    "elev = torch.linspace(0, 180, batch_size)\n",
    "azim = torch.linspace(-180, 180, batch_size)\n",
    "\n",
    "# All the cameras helper methods support mixed type inputs and broadcasting. So we can \n",
    "# view the camera from the same distance and specify dist=2.7 as a float,\n",
    "# and then specify elevation and azimuth angles for each viewpoint as tensors. \n",
    "R, T = look_at_view_transform(dist=2.7, elev=elev, azim=azim)\n",
    "cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n",
    "\n",
    "# Move the light back in front of the cow which is facing the -z direction.\n",
    "lights.location = torch.tensor([[0.0, 0.0, -3.0]], device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "gyYJCwEDfBML"
   },
   "outputs": [],
   "source": [
    "# We can pass arbitrary keyword arguments to the rasterizer/shader via the renderer\n",
    "# so the renderer does not need to be reinitialized if any of the settings change.\n",
    "images = renderer(meshes, cameras=cameras, lights=lights)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "image_grid(images.cpu().numpy(), rows=4, cols=5, rgb=True)"
   ]
  },
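  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "If you want to keep the rendered views, a minimal sketch using Matplotlib saves each image in the batch to disk (the `renders` directory name is arbitrary):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Save each rendered view to disk; the output directory name is arbitrary.\n",
    "os.makedirs(\"renders\", exist_ok=True)\n",
    "for i, image in enumerate(images.cpu().numpy()):\n",
    "    # Clip to [0, 1] since plt.imsave expects float RGB in that range.\n",
    "    plt.imsave(f\"renders/cow_view_{i:02d}.png\", image[..., :3].clip(0.0, 1.0))"
   ]
  },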
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 7. Plotly visualization \n",
    "If you only want to visualize a mesh, you don't really need to use a differentiable renderer - instead we support plotting of Meshes with plotly. For these Meshes, we use TexturesVertex to define a texture for the rendering.\n",
    "`plot_meshes` creates a Plotly figure with a trace for each Meshes object. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "verts, faces_idx, _ = load_obj(obj_filename)\n",
    "faces = faces_idx.verts_idx\n",
    "\n",
    "# Initialize each vertex to be white in color.\n",
    "verts_rgb = torch.ones_like(verts)[None]  # (1, V, 3)\n",
    "textures = TexturesVertex(verts_features=verts_rgb.to(device))\n",
    "\n",
    "# Create a Meshes object\n",
    "mesh = Meshes(\n",
    "    verts=[verts.to(device)],   \n",
    "    faces=[faces.to(device)],\n",
    "    textures=textures\n",
    ")\n",
    "\n",
    "# Render the plotly figure\n",
    "fig = plot_scene({\n",
    "    \"subplot1\": {\n",
    "        \"cow_mesh\": mesh\n",
    "    }\n",
    "})\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# use Plotly's default colors (no texture)\n",
    "mesh = Meshes(\n",
    "    verts=[verts.to(device)],   \n",
    "    faces=[faces.to(device)]\n",
    ")\n",
    "\n",
    "# Render the plotly figure\n",
    "fig = plot_scene({\n",
    "    \"subplot1\": {\n",
    "        \"cow_mesh\": mesh\n",
    "    }\n",
    "})\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# create a batch of meshes, and offset one to prevent overlap\n",
    "mesh_batch = Meshes(\n",
    "    verts=[verts.to(device), (verts + 2).to(device)],   \n",
    "    faces=[faces.to(device), faces.to(device)]\n",
    ")\n",
    "\n",
    "# plot mesh batch in the same trace\n",
    "fig = plot_scene({\n",
    "    \"subplot1\": {\n",
    "        \"cow_mesh_batch\": mesh_batch\n",
    "    }\n",
    "})\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# plot batch of meshes in different traces\n",
    "fig = plot_scene({\n",
    "    \"subplot1\": {\n",
    "        \"cow_mesh1\": mesh_batch[0],\n",
    "        \"cow_mesh2\": mesh_batch[1]\n",
    "    }\n",
    "})\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# plot batch of meshes in different subplots\n",
    "fig = plot_scene({\n",
    "    \"subplot1\": {\n",
    "        \"cow_mesh1\": mesh_batch[0]\n",
    "    },\n",
    "    \"subplot2\":{\n",
    "        \"cow_mesh2\": mesh_batch[1]\n",
    "    }\n",
    "})\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For batches, we can also use `plot_batch_individually` to avoid constructing the scene dictionary ourselves."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# extend the batch to have 4 meshes\n",
    "mesh_4 = mesh_batch.extend(2)\n",
    "\n",
    "# visualize the batch in different subplots, 2 per row\n",
    "fig = plot_batch_individually(mesh_4)\n",
    "# we can update the figure height and width\n",
    "fig.update_layout(height=1000, width=500)\n",
    "fig.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can also modify the axis arguments and axis backgrounds in both functions. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "fig2 = plot_scene({\n",
    "    \"cow_plot1\": {\n",
    "        \"cows\": mesh_batch\n",
    "    }\n",
    "},\n",
    "    xaxis={\"backgroundcolor\":\"rgb(200, 200, 230)\"},\n",
    "    yaxis={\"backgroundcolor\":\"rgb(230, 200, 200)\"},\n",
    "    zaxis={\"backgroundcolor\":\"rgb(200, 230, 200)\"}, \n",
    "    axis_args=AxisArgs(showgrid=True))\n",
    "fig2.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "fig3 = plot_batch_individually(\n",
    "    mesh_4, \n",
    "    ncols=2,\n",
    "    subplot_titles = [\"cow1\", \"cow2\", \"cow3\", \"cow4\"], # customize subplot titles\n",
    "    xaxis={\"backgroundcolor\":\"rgb(200, 200, 230)\"},\n",
    "    yaxis={\"backgroundcolor\":\"rgb(230, 200, 200)\"},\n",
    "    zaxis={\"backgroundcolor\":\"rgb(200, 230, 200)\"}, \n",
    "    axis_args=AxisArgs(showgrid=True))\n",
    "fig3.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "t3qphI1ElUb5"
   },
   "source": [
    "## 8. Conclusion\n",
    "In this tutorial we learnt how to **load** a textured mesh from an obj file, initialize a PyTorch3D datastructure called **Meshes**, set up an **Renderer** consisting of a **Rasterizer** and a **Shader**, and modify several components of the rendering pipeline. We also learned how to render Meshes in Plotly figures."
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "anp_metadata": {
   "path": "notebooks/render_textured_meshes.ipynb"
  },
  "bento_stylesheets": {
   "bento/extensions/flow/main.css": true,
   "bento/extensions/kernel_selector/main.css": true,
   "bento/extensions/kernel_ui/main.css": true,
   "bento/extensions/new_kernel/main.css": true,
   "bento/extensions/system_usage/main.css": true,
   "bento/extensions/theme/main.css": true
  },
  "colab": {
   "name": "render_textured_meshes.ipynb",
   "provenance": []
  },
  "disseminate_notebook_info": {
   "backup_notebook_id": "569222367081034"
  },
  "kernelspec": {
   "display_name": "pytorch3d_etc (local)",
   "language": "python",
   "name": "pytorch3d_etc_local"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.5+"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}