Unverified commit 7c5203eb, authored by Benjamin Lefaudeux, committed by GitHub

[chore] Documentation fixes, no more ref issues and more API fields (#103)

* Various fixes; no more issues with `make html`, and more API fields are now populated
parent b488dcfa
@@ -2,3 +2,5 @@ Pipe
 ====
 .. autoclass:: fairscale.nn.Pipe
+    :members:
+    :undoc-members:
@@ -2,3 +2,5 @@ OSS
 ====
 .. autoclass:: fairscale.optim.OSS
+    :members:
+    :undoc-members:
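(For reference: `:members:` and `:undoc-members:` are standard Sphinx autodoc options, not project-specific ones. The former documents all public members of the class; the latter also pulls in members that have no docstring, which is what populates the extra API fields mentioned in the commit message.)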
...@@ -37,9 +37,7 @@ release = "0.0.2" ...@@ -37,9 +37,7 @@ release = "0.0.2"
# Add any Sphinx extension module names here, as strings. They can be # Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones. # ones.
extensions = [ extensions = ["sphinx.ext.autodoc", "sphinx.ext.autosectionlabel"]
"sphinx.ext.autodoc",
]
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"] templates_path = ["_templates"]
......
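For context, `sphinx.ext.autosectionlabel` auto-generates a `:ref:` target from every section title, which is the likely companion to the "no more ref issues" part of this commit. A minimal sketch of the relevant conf.py lines; `autosectionlabel_prefix_document` is an optional extra assumed here, not something this commit adds:

    # conf.py sketch: with autosectionlabel enabled, a heading such as
    # "Deferred Batch Normalization" becomes referenceable via
    # :ref:`Deferred Batch Normalization` from any document.
    extensions = ["sphinx.ext.autodoc", "sphinx.ext.autosectionlabel"]

    # Assumption, not in this commit: prefix generated labels with the
    # document name to avoid collisions between identically named sections.
    autosectionlabel_prefix_document = True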
...@@ -290,7 +290,7 @@ class Pipe(Module): ...@@ -290,7 +290,7 @@ class Pipe(Module):
``'except_last'``, or ``'never'`` (default: ``'except_last'``) ``'except_last'``, or ``'never'`` (default: ``'except_last'``)
deferred_batch_norm (bool): deferred_batch_norm (bool):
whether to use deferred BatchNorm moving statistics (default: whether to use deferred BatchNorm moving statistics (default:
:data:`False`, see :ref:`Deferred Batch Normalization` for more :data:`False`, see :class:`DeferredBatchNorm` for more
details) details)
pipelined_backward (bool, optional): pipelined_backward (bool, optional):
if True, call torch.autograd.backward once per microbatch on the if True, call torch.autograd.backward once per microbatch on the
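A usage sketch for the `deferred_batch_norm` flag described above; the model, layer sizes, and balance are illustrative only. Deferring BatchNorm makes running statistics accumulate across all micro-batches instead of being updated once per micro-batch:

    import torch.nn as nn
    from fairscale.nn import Pipe

    # Hypothetical two-partition model; layer sizes are arbitrary.
    model = nn.Sequential(nn.Linear(16, 16), nn.BatchNorm1d(16), nn.Linear(16, 4))
    pipe = Pipe(
        model,
        balance=[2, 1],            # first two layers on one device, last on another
        chunks=4,                  # split each mini-batch into 4 micro-batches
        deferred_batch_norm=True,  # BN stats accumulated over the whole mini-batch
    )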
...@@ -527,15 +527,15 @@ class Pipe(Module): ...@@ -527,15 +527,15 @@ class Pipe(Module):
return super().cpu() return super().cpu()
def to(self, *args: Any, **kwargs: Any) -> "Pipe": def to(self, *args: Any, **kwargs: Any) -> "Pipe":
# Deny these usages: """ Restrict .to() options.
#
# - to(device[, dtype, non_blocking]) Deny these usages:
# - to(tensor[, non_blocking]) - to(device[, dtype, non_blocking])
# - to(tensor[, non_blocking])
# But allow this:
# But allow this:
# - to(dtype[, non_blocking]) - to(dtype[, non_blocking])
# """
if self.devices: if self.devices:
if "device" in kwargs or "tensor" in kwargs: if "device" in kwargs or "tensor" in kwargs:
raise MOVING_DENIED raise MOVING_DENIED
......
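A sketch of what the restriction means in practice, reusing the hypothetical `pipe` instance from above; partitions stay on the devices chosen at construction time, so only dtype changes go through:

    import torch

    pipe.to(torch.float16)  # allowed: to(dtype[, non_blocking])

    try:
        pipe.to(torch.device("cuda:0"))  # denied: Pipe manages device placement itself
    except TypeError as exc:             # MOVING_DENIED is raised (a TypeError here,
        print(exc)                       # by assumption from the torchgpipe lineage)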
...@@ -16,6 +16,8 @@ from torch.optim import SGD, Optimizer ...@@ -16,6 +16,8 @@ from torch.optim import SGD, Optimizer
from .utils import broadcast_object, recursive_copy_to_device from .utils import broadcast_object, recursive_copy_to_device
__all__ = ["OSS"]
if TYPE_CHECKING: # pragma: no cover if TYPE_CHECKING: # pragma: no cover
from torch.optim.optimizer import _params_t from torch.optim.optimizer import _params_t
else: else:
...@@ -25,7 +27,9 @@ else: ...@@ -25,7 +27,9 @@ else:
class OSS(Optimizer): class OSS(Optimizer):
"""Wraps an arbitrary :class:`optim.Optimizer <torch.optim.Optimizer>` """Wraps an arbitrary :class:`optim.Optimizer <torch.optim.Optimizer>`
optimizer and shards its state as described by ZeRO_. optimizer and shards its state as described by ZeRO_.
:: opt = OSS(params, optim=torch.optim.Adam, lr=0.01) ::
opt = OSS(params, optim=torch.optim.Adam, lr=0.01)
.. _ZeRO: https://arxiv.org/abs/1910.02054 .. _ZeRO: https://arxiv.org/abs/1910.02054
......
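A slightly fuller sketch of the docstring's one-liner; the process-group setup is assumed boilerplate and is not part of this commit:

    import torch
    from fairscale.optim import OSS

    # Assumes torch.distributed is already initialized, e.g. via
    # torch.distributed.init_process_group(backend="gloo", rank=..., world_size=...).
    model = torch.nn.Linear(8, 2)
    opt = OSS(model.parameters(), optim=torch.optim.Adam, lr=0.01)

    loss = model(torch.randn(4, 8)).sum()
    loss.backward()
    opt.step()  # each rank updates only its own shard, then parameters are re-synced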