Commit b9e5d37d authored by Michael Carilli

Docstring updates

parent 17e8a552
@@ -21,7 +21,7 @@ class FusedAdam(torch.optim.Optimizer):
         opt.step()
 
     :class:`apex.optimizers.FusedAdam` may be used with or without Amp. If you wish to use :class:`FusedAdam` with Amp,
-    you may choose any `opt_level`::
+    you may choose any ``opt_level``::
 
         opt = apex.optimizers.FusedAdam(model.parameters(), lr = ....)
         model, opt = amp.initialize(model, opt, opt_level="O0" or "O1" or "O2")
@@ -20,7 +20,7 @@ class FusedLAMB(torch.optim.Optimizer):
         opt.step()
 
     :class:`apex.optimizers.FusedLAMB` may be used with or without Amp. If you wish to use :class:`FusedLAMB` with Amp,
-    you may choose any `opt_level`::
+    you may choose any ``opt_level``::
 
         opt = apex.optimizers.FusedLAMB(model.parameters(), lr = ....)
         model, opt = amp.initialize(model, opt, opt_level="O0" or "O1" or "O2")
@@ -20,7 +20,7 @@ class FusedNovoGrad(torch.optim.Optimizer):
         opt.step()
 
     :class:`apex.optimizers.FusedNovoGrad` may be used with or without Amp. If you wish to use :class:`FusedNovoGrad` with Amp,
-    you may choose any `opt_level`::
+    you may choose any ``opt_level``::
 
         opt = apex.optimizers.FusedNovoGrad(model.parameters(), lr = ....)
         model, opt = amp.initialize(model, opt, opt_level="O0" or "O1" or "O2")
@@ -21,7 +21,7 @@ class FusedSGD(Optimizer):
         opt.step()
 
     :class:`apex.optimizers.FusedSGD` may be used with or without Amp. If you wish to use :class:`FusedSGD` with Amp,
-    you may choose any `opt_level`::
+    you may choose any ``opt_level``::
 
         opt = apex.optimizers.FusedSGD(model.parameters(), lr = ....)
         model, opt = amp.initialize(model, opt, opt_level="O0" or "O1" or "O2")
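
All four hunks document the same usage pattern, so a single end-to-end sketch covers them. The snippet below is a minimal illustration and is not part of the commit: the linear model and batch are placeholder stand-ins, and it assumes apex is installed with its CUDA extensions. FusedAdam, amp.initialize, and amp.scale_loss are the actual apex APIs the docstrings reference::

    # Minimal sketch of the pattern described by the updated docstrings.
    # Assumes apex is built with CUDA extensions; model and data are placeholders.
    import torch
    from apex import amp
    from apex.optimizers import FusedAdam

    model = torch.nn.Linear(10, 10).cuda()        # placeholder model
    opt = FusedAdam(model.parameters(), lr=1e-3)

    # Per the docstrings, the fused optimizers work with any opt_level
    # ("O0", "O1", or "O2").
    model, opt = amp.initialize(model, opt, opt_level="O1")

    for _ in range(10):
        inp = torch.randn(4, 10, device="cuda")   # placeholder batch
        loss = model(inp).sum()
        opt.zero_grad()
        # Scale the loss so backward() produces gradients that Amp can
        # safely unscale under mixed precision.
        with amp.scale_loss(loss, opt) as scaled_loss:
            scaled_loss.backward()
        opt.step()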