"vscode:/vscode.git/clone" did not exist on "2a48a100435dc823cec4b6f3062575e1032f07c5"
Unverified commit d67b8c32, authored by Ruilong Li(李瑞龙), committed by GitHub

Reformatting and Update Tests (#46)

* more organized version

* add copyrights

* proper versioning

* proper versioning

* cleanup project; read version from pyproject

* read version from pyproject

* update weblink

* cleanup nerfacc file structure

* proper test for contraction

* proper test for intersection

* proper tests for pack, grid, and intersection

* proper testing for rendering

* bug fix

* proper testing for marching

* run check reformat

* add hints on nvcc not found

* add doc to readme

* resume github check

* update readthedocs env

* rm tool.setuptools.packages.find
parent 300ec71a
@@ -14,5 +14,8 @@ python:
     # Equivalent to 'pip install .'
     - method: pip
       path: .
-    # Equivalent to 'pip install -r docs/requirements.txt'
-    - requirements: docs/requirements.txt
+    # Equivalent to 'pip install .[docs]'
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs
\ No newline at end of file
-# nerfacc
+# NerfAcc
 [![Core Tests.](https://github.com/KAIR-BAIR/nerfacc/actions/workflows/code_checks.yml/badge.svg)](https://github.com/KAIR-BAIR/nerfacc/actions/workflows/code_checks.yml)
 [![Documentation Status](https://readthedocs.com/projects/plenoptix-nerfacc/badge/?version=latest)](https://plenoptix-nerfacc.readthedocs-hosted.com/en/latest/?badge=latest)
+https://www.nerfacc.com/
 This is a **tiny** toolbox for **accelerating** NeRF training & rendering using PyTorch CUDA extensions. Plug-and-play for most of the NeRFs!
+## Installation
+```
+pip install nerfacc
+```
 ## Examples:
 ``` bash
-# Instant-NGP Nerf
+# Instant-NGP NeRF in 4.5 minutes.
+# See results here: https://www.nerfacc.com/en/latest/examples/ngp.html
 python examples/train_ngp_nerf.py --train_split trainval --scene lego
 ```
 ``` bash
-# Vanilla MLP Nerf
+# Vanilla MLP NeRF in 1 hour.
+# See results here: https://www.nerfacc.com/en/latest/examples/vanilla.html
 python examples/train_mlp_nerf.py --train_split train --scene lego
 ```
 ```bash
-# MLP Nerf on Dynamic objects (D-Nerf)
+# T-NeRF for Dynamic objects in 1 hour.
+# See results here: https://www.nerfacc.com/en/latest/examples/dnerf.html
 python examples/train_mlp_dnerf.py --train_split train --scene lego
 ```
 ```bash
-# NGP on MipNeRF360 unbounded scene
+# Unbounded scene in 1 hour.
+# See results here: https://www.nerfacc.com/en/latest/examples/unbounded.html
 python examples/train_ngp_nerf.py --train_split train --scene garden --auto_aabb --unbounded --cone_angle=0.004
 ```
sphinx==4.5.0
sphinx-copybutton==0.5.0
sphinx-design==0.2.0
pytorch_sphinx_theme @ git+https://github.com/liruilong940607/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme
sphinx_copybutton
\ No newline at end of file
 # Configuration file for the Sphinx documentation builder.
 import pytorch_sphinx_theme
+from nerfacc import __version__
 # -- Project information
 project = "nerfacc"
 copyright = "2022, Ruilong"
 author = "Ruilong"
-release = "0.1.4"
-version = "0.1.4"
+release = __version__
 # -- General configuration
......
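The conf.py hunk above swaps the hard-coded `release = "0.1.4"` for `release = __version__`, and the commit log says the version is now read from `pyproject.toml`. The actual `nerfacc/version.py` is not part of this diff; the sketch below is only one plausible way to expose such a `__version__`, via installed-package metadata.

```python
# Hypothetical version.py sketch (not the committed file): resolve __version__
# from the installed distribution metadata, which setuptools fills in from
# pyproject.toml at build time.
from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version("nerfacc")
except PackageNotFoundError:
    # Running from an uninstalled source checkout.
    __version__ = "0.0.0"
```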
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import json import json
import os import os
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import collections import collections
import os import os
import sys import sys
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import collections import collections
import json import json
import os import os
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import collections import collections
Rays = collections.namedtuple("Rays", ("origins", "viewdirs")) Rays = collections.namedtuple("Rays", ("origins", "viewdirs"))
......
""" The MLPs and Voxels. """ """
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import functools import functools
import math import math
from typing import Callable, Optional from typing import Callable, Optional
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
from typing import Callable, List, Union from typing import Callable, List, Union
import torch import torch
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import argparse import argparse
import math import math
import os import os
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import argparse import argparse
import math import math
import os import os
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import argparse import argparse
import math import math
import os import os
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import random import random
from typing import Optional from typing import Optional
......
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
from .contraction import ContractionType, contract, contract_inv from .contraction import ContractionType, contract, contract_inv
from .grid import Grid, OccupancyGrid from .grid import Grid, OccupancyGrid
from .pipeline import rendering from .intersection import ray_aabb_intersect
from .ray_marching import ( from .pack import unpack_to_ray_indices
ray_aabb_intersect, from .ray_marching import ray_marching
ray_marching, from .version import __version__
unpack_to_ray_indices,
)
from .vol_rendering import ( from .vol_rendering import (
accumulate_along_rays, accumulate_along_rays,
render_visibility, render_visibility,
render_weight_from_alpha, render_weight_from_alpha,
render_weight_from_density, render_weight_from_density,
rendering,
) )
__all__ = [ __all__ = [
...@@ -27,4 +30,5 @@ __all__ = [ ...@@ -27,4 +30,5 @@ __all__ = [
"render_weight_from_alpha", "render_weight_from_alpha",
"render_weight_from_density", "render_weight_from_density",
"rendering", "rendering",
"__version__",
] ]
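Since this hunk moves `ray_aabb_intersect`, `unpack_to_ray_indices`, and `rendering` to new modules while keeping them re-exported at the top level, a quick smoke test of the import surface is a cheap way to confirm nothing was dropped. The check below only uses names visible in the hunk above and makes no assumptions about their call signatures.

```python
# Minimal smoke test of the reorganized nerfacc public API: verify that every
# name re-exported by the new __init__.py is importable from the package.
import nerfacc

print("nerfacc", nerfacc.__version__)

expected = [
    "ContractionType", "contract", "contract_inv",
    "Grid", "OccupancyGrid",
    "ray_aabb_intersect", "unpack_to_ray_indices", "ray_marching",
    "accumulate_along_rays", "render_visibility",
    "render_weight_from_alpha", "render_weight_from_density", "rendering",
]
missing = [name for name in expected if not hasattr(nerfacc, name)]
assert not missing, f"missing exports: {missing}"
```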
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
from enum import Enum from enum import Enum
import torch import torch
......
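This contraction module (exercised by the new contraction test in the commit log) maps unbounded scene coordinates into a bounded domain. For reference, a plain-PyTorch version of the widely used MipNeRF-360 style contraction is sketched below; it illustrates the idea only and is not nerfacc's CUDA implementation.

```python
# Illustrative MipNeRF-360 style contraction (not nerfacc's kernel): points
# inside the unit ball are kept, points outside are squashed so that all of
# R^3 lands in a ball of radius 2.
import torch

def contract_unbounded_sphere(x: torch.Tensor) -> torch.Tensor:
    norm = x.norm(dim=-1, keepdim=True).clamp_min(1e-12)
    squashed = (2.0 - 1.0 / norm) * (x / norm)
    return torch.where(norm <= 1.0, x, squashed)
```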
"""
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
from typing import Any, Callable from typing import Any, Callable
......
"""Setup cuda backend.""" """
Copyright (c) 2022 Ruilong Li, UC Berkeley.
"""
import glob import glob
import os import os
from subprocess import DEVNULL, call from subprocess import DEVNULL, call
...@@ -31,5 +34,8 @@ if cuda_toolkit_available(): ...@@ -31,5 +34,8 @@ if cuda_toolkit_available():
extra_cflags=["-O3"], extra_cflags=["-O3"],
extra_cuda_cflags=["-O3"], extra_cuda_cflags=["-O3"],
) )
else:
console = Console()
console.print("[bold red]No CUDA toolkit found. NerfAcc will be disabled.")
__all__ = ["_C"] __all__ = ["_C"]
+/*
+ * Copyright (c) 2022 Ruilong Li, UC Berkeley.
+ */
 #include "include/helpers_cuda.h"
 #include "include/helpers_math.h"
 #include "include/helpers_contraction.h"
......
+/*
+ * Copyright (c) 2022 Ruilong Li, UC Berkeley.
+ */
 #pragma once
 #include "helpers_math.h"
......