Unverified Commit e4b5348e authored by Zaida Zhou, committed by GitHub

[Docs] Refactor the structure of documentation (#1580)

* [Docs] Refactor the structure of documentation

* [Docs] Refactor the structure of documentation

* fix symlink

* fix link

* fix typo

* polish docstring

* fix docstring
parent f31f1cdb
@@ -548,7 +548,7 @@ def _initialize_override(module, override, cfg):
 def initialize(module, init_cfg):
-    """Initialize a module.
+    r"""Initialize a module.
 
     Args:
         module (``torch.nn.Module``): the module will be initialized.
@@ -556,6 +556,7 @@ def initialize(module, init_cfg):
             define initializer. OpenMMLab has implemented 6 initializers
             including ``Constant``, ``Xavier``, ``Normal``, ``Uniform``,
             ``Kaiming``, and ``Pretrained``.
+
     Example:
         >>> module = nn.Linear(2, 3, bias=True)
         >>> init_cfg = dict(type='Constant', layer='Linear', val=1, bias=2)
...
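For readers of the docstring above, a minimal usage sketch of ``initialize`` driven by a list of init configs (the module, layer choices, and values here are illustrative, not taken from this commit):

    import torch.nn as nn
    from mmcv.cnn import initialize

    # Hypothetical two-layer module; initialize() only touches parameters,
    # it never calls forward().
    model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8))
    init_cfg = [
        dict(type='Kaiming', layer='Conv2d'),                       # conv weights
        dict(type='Constant', layer='BatchNorm2d', val=1, bias=0),  # BN affine params
    ]
    initialize(model, init_cfg)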
@@ -260,8 +260,9 @@ def soft_nms(boxes,
 def batched_nms(boxes, scores, idxs, nms_cfg, class_agnostic=False):
     r"""Performs non-maximum suppression in a batched fashion.
 
-    Modified from
-    https://github.com/pytorch/vision/blob/505cd6957711af790211896d32b40291bea1bc21/torchvision/ops/boxes.py#L39.
+    Modified from `torchvision/ops/boxes.py#L39
+    <https://github.com/pytorch/vision/blob/
+    505cd6957711af790211896d32b40291bea1bc21/torchvision/ops/boxes.py#L39>`_.
     In order to perform NMS independently per class, we add an offset to all
     the boxes. The offset is dependent only on the class idx, and is large
     enough so that boxes from different classes do not overlap.
...
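The docstring paragraph above is the whole trick behind batched NMS. A standalone sketch of the same idea (not mmcv's implementation, and assuming torchvision is installed) could look like this:

    import torch
    from torchvision.ops import nms

    def batched_nms_sketch(boxes, scores, idxs, iou_threshold=0.5):
        # Shift each box by an amount that depends only on its class index,
        # so boxes of different classes can never overlap, then run a single
        # class-agnostic NMS pass over the shifted boxes.
        max_coordinate = boxes.max()
        offsets = idxs.to(boxes) * (max_coordinate + 1)
        keep = nms(boxes + offsets[:, None], scores, iou_threshold)
        return keep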
@@ -14,10 +14,21 @@ from .hooks import (HOOKS, CheckpointHook, ClosureHook, DistEvalHook,
                     DistSamplerSeedHook, DvcliveLoggerHook, EMAHook, EvalHook,
                     Fp16OptimizerHook, GradientCumulativeFp16OptimizerHook,
                     GradientCumulativeOptimizerHook, Hook, IterTimerHook,
-                    LoggerHook, LrUpdaterHook, MlflowLoggerHook,
-                    NeptuneLoggerHook, OptimizerHook, PaviLoggerHook,
-                    SyncBuffersHook, TensorboardLoggerHook, TextLoggerHook,
-                    WandbLoggerHook)
+                    LoggerHook, MlflowLoggerHook, NeptuneLoggerHook,
+                    OptimizerHook, PaviLoggerHook, SyncBuffersHook,
+                    TensorboardLoggerHook, TextLoggerHook, WandbLoggerHook)
+from .hooks.lr_updater import StepLrUpdaterHook  # noqa
+from .hooks.lr_updater import (CosineAnnealingLrUpdaterHook,
+                               CosineRestartLrUpdaterHook, CyclicLrUpdaterHook,
+                               ExpLrUpdaterHook, FixedLrUpdaterHook,
+                               FlatCosineAnnealingLrUpdaterHook,
+                               InvLrUpdaterHook, LrUpdaterHook,
+                               OneCycleLrUpdaterHook, PolyLrUpdaterHook)
+from .hooks.momentum_updater import (CosineAnnealingMomentumUpdaterHook,
+                                     CyclicMomentumUpdaterHook,
+                                     MomentumUpdaterHook,
+                                     OneCycleMomentumUpdaterHook,
+                                     StepMomentumUpdaterHook)
 from .iter_based_runner import IterBasedRunner, IterLoader
 from .log_buffer import LogBuffer
 from .optimizer import (OPTIMIZER_BUILDERS, OPTIMIZERS,
@@ -29,6 +40,12 @@ from .utils import get_host_info, get_time_str, obj_from_dict, set_random_seed
 __all__ = [
     'BaseRunner', 'Runner', 'EpochBasedRunner', 'IterBasedRunner', 'LogBuffer',
     'HOOKS', 'Hook', 'CheckpointHook', 'ClosureHook', 'LrUpdaterHook',
+    'FixedLrUpdaterHook', 'StepLrUpdaterHook', 'ExpLrUpdaterHook',
+    'PolyLrUpdaterHook', 'InvLrUpdaterHook', 'CosineAnnealingLrUpdaterHook',
+    'FlatCosineAnnealingLrUpdaterHook', 'CosineRestartLrUpdaterHook',
+    'CyclicLrUpdaterHook', 'OneCycleLrUpdaterHook', 'MomentumUpdaterHook',
+    'StepMomentumUpdaterHook', 'CosineAnnealingMomentumUpdaterHook',
+    'CyclicMomentumUpdaterHook', 'OneCycleMomentumUpdaterHook',
     'OptimizerHook', 'IterTimerHook', 'DistSamplerSeedHook', 'LoggerHook',
     'PaviLoggerHook', 'TextLoggerHook', 'TensorboardLoggerHook',
     'NeptuneLoggerHook', 'WandbLoggerHook', 'MlflowLoggerHook',
...
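With the ``__all__`` additions above, the concrete LR and momentum updater hooks become importable from the package root, for example:

    from mmcv.runner import CosineAnnealingLrUpdaterHook, StepMomentumUpdaterHook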
@@ -8,9 +8,17 @@ from .iter_timer import IterTimerHook
 from .logger import (DvcliveLoggerHook, LoggerHook, MlflowLoggerHook,
                      NeptuneLoggerHook, PaviLoggerHook, TensorboardLoggerHook,
                      TextLoggerHook, WandbLoggerHook)
-from .lr_updater import LrUpdaterHook
+from .lr_updater import (CosineAnnealingLrUpdaterHook,
+                         CosineRestartLrUpdaterHook, CyclicLrUpdaterHook,
+                         ExpLrUpdaterHook, FixedLrUpdaterHook,
+                         FlatCosineAnnealingLrUpdaterHook, InvLrUpdaterHook,
+                         LrUpdaterHook, OneCycleLrUpdaterHook,
+                         PolyLrUpdaterHook, StepLrUpdaterHook)
 from .memory import EmptyCacheHook
-from .momentum_updater import MomentumUpdaterHook
+from .momentum_updater import (CosineAnnealingMomentumUpdaterHook,
+                               CyclicMomentumUpdaterHook, MomentumUpdaterHook,
+                               OneCycleMomentumUpdaterHook,
+                               StepMomentumUpdaterHook)
 from .optimizer import (Fp16OptimizerHook, GradientCumulativeFp16OptimizerHook,
                         GradientCumulativeOptimizerHook, OptimizerHook)
 from .profiler import ProfilerHook
@@ -19,11 +27,16 @@ from .sync_buffer import SyncBuffersHook
 
 __all__ = [
     'HOOKS', 'Hook', 'CheckpointHook', 'ClosureHook', 'LrUpdaterHook',
-    'OptimizerHook', 'Fp16OptimizerHook', 'IterTimerHook',
-    'DistSamplerSeedHook', 'EmptyCacheHook', 'LoggerHook', 'MlflowLoggerHook',
-    'PaviLoggerHook', 'TextLoggerHook', 'TensorboardLoggerHook',
-    'NeptuneLoggerHook', 'WandbLoggerHook', 'DvcliveLoggerHook',
-    'MomentumUpdaterHook', 'SyncBuffersHook', 'EMAHook', 'EvalHook',
-    'DistEvalHook', 'ProfilerHook', 'GradientCumulativeOptimizerHook',
-    'GradientCumulativeFp16OptimizerHook'
+    'FixedLrUpdaterHook', 'StepLrUpdaterHook', 'ExpLrUpdaterHook',
+    'PolyLrUpdaterHook', 'InvLrUpdaterHook', 'CosineAnnealingLrUpdaterHook',
+    'FlatCosineAnnealingLrUpdaterHook', 'CosineRestartLrUpdaterHook',
+    'CyclicLrUpdaterHook', 'OneCycleLrUpdaterHook', 'OptimizerHook',
+    'Fp16OptimizerHook', 'IterTimerHook', 'DistSamplerSeedHook',
+    'EmptyCacheHook', 'LoggerHook', 'MlflowLoggerHook', 'PaviLoggerHook',
+    'TextLoggerHook', 'TensorboardLoggerHook', 'NeptuneLoggerHook',
+    'WandbLoggerHook', 'DvcliveLoggerHook', 'MomentumUpdaterHook',
+    'StepMomentumUpdaterHook', 'CosineAnnealingMomentumUpdaterHook',
+    'CyclicMomentumUpdaterHook', 'OneCycleMomentumUpdaterHook',
+    'SyncBuffersHook', 'EMAHook', 'EvalHook', 'DistEvalHook', 'ProfilerHook',
+    'GradientCumulativeOptimizerHook', 'GradientCumulativeFp16OptimizerHook'
 ]
@@ -65,7 +65,7 @@ class EvalHook(Hook):
         **eval_kwargs: Evaluation arguments fed into the evaluate function of
             the dataset.
 
-    Notes:
+    Note:
         If new arguments are added for EvalHook, tools/test.py,
         tools/eval_metric.py may be affected.
     """
...
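As a reminder of how the ``**eval_kwargs`` mentioned in this docstring are used, a hedged sketch (``runner`` and ``val_dataloader`` are assumed to exist, and ``metric='bbox'`` is an illustrative keyword understood by some downstream datasets, not something defined in this commit):

    from mmcv.runner import EvalHook

    # Extra keyword arguments are forwarded verbatim to dataset.evaluate().
    runner.register_hook(EvalHook(val_dataloader, interval=1, metric='bbox'))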
@@ -232,7 +232,7 @@ class CyclicMomentumUpdaterHook(MomentumUpdaterHook):
     This momentum scheduler usually used together with the CyclicLRUpdater
     to improve the performance in the 3D detection area.
 
-    Attributes:
+    Args:
         target_ratio (tuple[float]): Relative ratio of the lowest momentum and
             the highest momentum to the initial momentum.
         cyclic_times (int): Number of cycles during training
...
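Since the docstring notes that this scheduler is usually paired with the cyclic LR updater, here is a hedged config sketch of that pairing (the numeric values are illustrative ones seen in downstream 3D detection configs, not part of this commit):

    # policy='cyclic' makes the runner build CyclicLrUpdaterHook /
    # CyclicMomentumUpdaterHook from these dicts.
    lr_config = dict(
        policy='cyclic',
        target_ratio=(10, 1e-4),  # (highest, lowest) ratio to the initial lr
        cyclic_times=1,
        step_ratio_up=0.4)
    momentum_config = dict(
        policy='cyclic',
        target_ratio=(0.85 / 0.95, 1),
        cyclic_times=1,
        step_ratio_up=0.4)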