OpenDAS / nni · Commit 456b390c
"...git@developer.sourcefind.cn:renzhc/diffusers_dcu.git" did not exist on "b7058d142c162bc33f1f47f7909e0913ead3d248"
Commit 456b390c (unverified signature), authored Mar 07, 2022 by Yuge Zhang, committed by GitHub on Mar 07, 2022.

[doc] Full reference TOC (#4618)

Parent: 42c3076d

Changes: 22 files in total; the 20 shown on this page add 336 lines and remove 9 (+336, -9).
dependencies/develop.txt (+1, -1)
dependencies/recommended.txt (+1, -0)
dependencies/recommended_gpu.txt (+1, -0)
dependencies/recommended_legacy.txt (+1, -0)
docs/.gitignore (+3, -0)
docs/extension/patch_autodoc.py (+178, -0)
docs/source/conf.py (+16, -1)
docs/source/index.rst (+5, -1)
docs/source/index_zh.rst (+1, -1)
docs/source/reference.rst (+4, -0)
docs/source/reference/python_api.rst (+10, -0)
docs/source/reference_zh.rst (+3, -1)
docs/templates/autosummary/module.rst (+68, -0)
nni/compression/pytorch/speedup/jit_translate.py (+18, -1)
nni/experiment/config/utils/internal.py (+9, -0)
nni/experiment/config/utils/public.py (+6, -0)
nni/retiarii/execution/base.py (+2, -0)
nni/retiarii/nn/pytorch/cell.py (+3, -1)
nni/retiarii/nn/pytorch/component.py (+3, -1)
nni/retiarii/nn/pytorch/nasbench101.py (+3, -1)
dependencies/develop.txt

@@ -9,7 +9,7 @@ pytest
 pytest-azurepipelines
 pytest-cov
 rstcheck
-sphinx
+sphinx >= 4.4
 sphinx-argparse-nni >= 0.4.0
 sphinx-gallery
 sphinxcontrib-bibtex
dependencies/recommended.txt

@@ -9,6 +9,7 @@ torchvision == 0.11.1+cpu ; sys_platform != "darwin"
 torchvision == 0.11.1 ; sys_platform == "darwin"
 pytorch-lightning >= 1.5.0
 torchmetrics
+lightgbm
 onnx
 peewee
 graphviz
dependencies/recommended_gpu.txt

@@ -5,6 +5,7 @@ tensorflow
 torch == 1.10.0+cu111
 torchvision == 0.11.1+cu111
 pytorch-lightning >= 1.5.0
+lightgbm
 onnx
 peewee
 graphviz
dependencies/recommended_legacy.txt

@@ -7,6 +7,7 @@ torchvision == 0.8.2+cpu
 pytorch-lightning
 torchmetrics
+lightgbm
 onnx
 peewee
 graphviz
docs/.gitignore

@@ -2,3 +2,6 @@ build/
 # legacy build
 _build/
+
+# auto-generated reference table
+_modules/
docs/extension/patch_autodoc.py (new file, 0 → 100644)

"""Hack autodoc to get more fine-grained docstring rendering control.

autodoc and autosummary didn't expose many of their controls to sphinx users via config.
To customize them, the "correct" approach seems to be copying and pasting all their code and rewriting some parts.
To avoid doing this, I monkey-patched some of the functions to keep the changes minimal.

Note that some of them rely on sphinx internal APIs, which can break when sphinx is upgraded.
Try to keep them updated, or pin to a particular sphinx version.
"""

import inspect
import os
from typing import List, Tuple

import sphinx
from docutils import nodes
from docutils.nodes import Node


class ClassNewBlacklistPatch:
    """Force some classes to skip ``__new__`` when generating signature."""

    original = None

    def restore(self, *args, **kwargs):
        assert self.original is not None
        sphinx.ext.autodoc._CLASS_NEW_BLACKLIST = self.original

    def patch(self, *args, **kwargs):
        self.original = sphinx.ext.autodoc._CLASS_NEW_BLACKLIST
        blacklist = []

        import nni.retiarii.nn.pytorch
        for name in dir(nni.retiarii.nn.pytorch):
            obj = getattr(nni.retiarii.nn.pytorch, name)
            if inspect.isclass(obj):
                new_name = "{0.__module__}.{0.__qualname__}".format(obj.__new__)
                if new_name not in blacklist:
                    blacklist.append(new_name)

        sphinx.ext.autodoc._CLASS_NEW_BLACKLIST = self.original + blacklist


def disable_trace_patch(*args, **kwargs):
    """Disable trace by setting an environment variable."""
    os.environ['NNI_TRACE_FLAG'] = 'DISABLE'


def trial_tool_import_patch(*args, **kwargs):
    """Insert dummy trial tool variables to ensure trial_tool can be imported.

    See nni/tools/trial_tool/constants.py
    """
    os.environ.update({
        'NNI_OUTPUT_DIR': '/tmp',
        'NNI_PLATFORM': 'unittest',
        'NNI_SYS_DIR': '/tmp',
        'NNI_TRIAL_JOB_ID': 'dummy',
        'NNI_EXP_ID': 'dummy',
        'MULTI_PHASE': 'dummy'
    })


class AutoSummaryPatch:
    """Ignore certain files as they are completely un-importable. It patches:

    - find_autosummary_in_files: Some modules cannot be imported at all due to dependency issues or some special design.
      They need to be skipped when running autosummary generate.
    - Autosummary.get_table: The original autosummary creates an index for each module, and the module links in the
      autosummary table point to the corresponding generated module page (by using ``:py:module:xxx``). This doesn't
      work for us, because we have used automodule elsewhere (other than autosummary) in our docs, and to avoid a
      duplicate index, we have to set ``:noindex:`` in the autosummary template (see docs/templates/autosummary/module.rst).
      This breaks most of the links, as they fail to resolve the generated module page through the index.
      We here change the python domain role to a general domain role (``:doc:``) and link to the page directly.
    """

    find_autosummary_original = None
    get_table_original = None

    def restore(self, *args, **kwargs):
        assert self.find_autosummary_original is not None and self.get_table_original is not None
        sphinx.ext.autosummary.generate.find_autosummary_in_files = self.find_autosummary_original
        sphinx.ext.autosummary.Autosummary.get_table = self.get_table_original

    def patch(self, app, config):
        from sphinx.ext.autosummary import Autosummary
        from sphinx.ext.autosummary.generate import AutosummaryEntry

        self.find_autosummary_original = sphinx.ext.autosummary.generate.find_autosummary_in_files
        self.get_table_original = Autosummary.get_table

        def find_autosummary_in_files(filenames: List[str]) -> List[AutosummaryEntry]:
            items: List[AutosummaryEntry] = self.find_autosummary_original(filenames)
            items = [item for item in items if item.name not in config.autosummary_mock_imports]
            return items

        def get_table(autosummary, items: List[Tuple[str, str, str, str]]) -> List[Node]:
            col_spec, autosummary_table = self.get_table_original(autosummary, items)
            if 'toctree' in autosummary.options:
                # probably within modules
                table = autosummary_table[0]
                tgroup = table[0]
                tbody = tgroup[-1]
                for row in tbody:
                    entry = row[0]
                    paragraph = entry[0]
                    pending_xref = paragraph[0]
                    # get the reference path and check whether it has been generated
                    # if path to reference is changed, this should also be changed
                    reftarget_path = 'reference/_modules/' + pending_xref['reftarget']
                    if reftarget_path in autosummary.env.found_docs:
                        # make :py:obj:`xxx` look like a :doc:`xxx`
                        pending_xref['refdomain'] = 'std'
                        pending_xref['reftype'] = 'doc'
                        pending_xref['refexplicit'] = False
                        pending_xref['refwarn'] = True
                        pending_xref['reftarget'] = '/' + reftarget_path
                        # a special tag to enable `ResolveDocPatch`
                        pending_xref['refkeepformat'] = True
            return [col_spec, autosummary_table]

        sphinx.ext.autosummary.generate.find_autosummary_in_files = find_autosummary_in_files
        sphinx.ext.autosummary.Autosummary.get_table = get_table


class ResolveDocPatch:
    """The original :doc: role throws away all formatting and keeps the raw text only.
    We wish to keep module names literal. This patch keeps the literal format in the :doc: resolver."""

    original = None

    def restore(self, *args, **kwargs):
        assert self.original is not None
        sphinx.domains.std.StandardDomain._resolve_doc_xref = self.original

    def patch(self, *args, **kwargs):
        self.original = sphinx.domains.std.StandardDomain._resolve_doc_xref

        def doc_xref_resolver(std_domain, env, fromdocname, builder, typ, target, node, contnode):
            if not node.get('refkeepformat'):
                # redirect to the original implementation to make it safer
                return self.original(std_domain, env, fromdocname, builder, typ, target, node, contnode)
            # directly reference the document by source name; can be absolute or relative
            from sphinx.domains.std import docname_join, make_refnode
            refdoc = node.get('refdoc', fromdocname)
            docname = docname_join(refdoc, node['reftarget'])
            if docname not in env.all_docs:
                return None
            else:
                innernode = node[0]
                # no astext here, to keep literal intact
                return make_refnode(builder, fromdocname, docname, None, innernode)

        sphinx.domains.std.StandardDomain._resolve_doc_xref = doc_xref_resolver


def setup(app):
    # See life-cycle of sphinx app here:
    # https://www.sphinx-doc.org/en/master/extdev/appapi.html#sphinx-core-events
    patch = ClassNewBlacklistPatch()
    app.connect('env-before-read-docs', patch.patch)
    app.connect('env-merge-info', patch.restore)

    patch = ResolveDocPatch()
    app.connect('env-before-read-docs', patch.patch)
    app.connect('env-merge-info', patch.restore)

    app.connect('env-before-read-docs', disable_trace_patch)

    # autosummary generate happens at builder-inited
    app.connect('config-inited', trial_tool_import_patch)
    autosummary_patch = AutoSummaryPatch()
    app.connect('config-inited', autosummary_patch.patch)
    app.connect('env-merge-info', autosummary_patch.restore)
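For reference, here is a small sketch (not part of the commit) of the entries that ClassNewBlacklistPatch appends to _CLASS_NEW_BLACKLIST: the format string resolves a class's ``__new__`` to the module and qualified name of wherever that ``__new__`` is defined, which is the key autodoc checks when deciding whether to skip ``__new__`` during signature generation. The ``Widget`` class below is hypothetical.

# Hypothetical class, only to show the entry format appended to _CLASS_NEW_BLACKLIST.
class Widget:
    def __new__(cls):
        return super().__new__(cls)

entry = "{0.__module__}.{0.__qualname__}".format(Widget.__new__)
print(entry)  # '__main__.Widget.__new__' when run as a script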
docs/source/conf.py

@@ -45,6 +45,7 @@ release = 'v2.6'
 extensions = [
     'sphinx_gallery.gen_gallery',
     'sphinx.ext.autodoc',
+    'sphinx.ext.autosummary',
     'sphinx.ext.mathjax',
     'sphinxarg4nni.ext',
     'sphinx.ext.napoleon',
@@ -59,10 +60,24 @@ extensions = [
     'inplace_translation',
     'cardlinkitem',
     'patch_docutils',
+    'patch_autodoc',
 ]
 
+# Autosummary related settings
+autosummary_imported_members = True
+autosummary_ignore_module_all = False
+
+# Auto-generate stub files before building docs
+autosummary_generate = True
+
 # Add mock modules
-autodoc_mock_imports = ['apex', 'nni_node', 'tensorrt', 'pycuda', 'nn_meter']
+autodoc_mock_imports = ['apex', 'nni_node', 'tensorrt', 'pycuda', 'nn_meter', 'azureml']
+autosummary_mock_imports = [
+    'nni.retiarii.codegen.tensorflow',
+    'nni.nas.benchmarks.nasbench101.db_gen',
+    'nni.tools.jupyter_extension.management',
+] + autodoc_mock_imports
 
 # Bibliography files
 bibtex_bibfiles = ['refs.bib']
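Two notes on this hunk. First, ``autosummary_ignore_module_all = False`` makes autosummary honor each module's ``__all__``, which is presumably why this commit also bumps ``sphinx >= 4.4`` in dependencies/develop.txt (the release that introduced the option) and adds ``__all__`` to several modules below. Second, for the new ``'patch_autodoc'`` entry to resolve, the docs/extension directory must already be importable from conf.py; that wiring is not part of this diff, but conventionally it looks like the following sketch (the exact path is an assumption).

# Sketch (assumption, not part of this diff): making docs/extension importable
# so that extensions = [..., 'patch_autodoc'] can be loaded by name.
import os
import sys

sys.path.insert(0, os.path.abspath('../extension'))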
docs/source/index.rst

@@ -32,7 +32,11 @@ Neural Network Intelligence
    :caption: References
    :hidden:
 
-   References <reference>
+   nnictl Commands <reference/nnictl>
+   Experiment Configuration <reference/experiment_config>
+   Experiment Configuration (legacy) <Tutorial/ExperimentConfig>
+   Search Space <Tutorial/SearchSpaceSpec>
+   Python API <reference/_modules/nni>
 
 .. toctree::
    :maxdepth: 2
docs/source/index_zh.rst

-.. ef76cab17df95cdf2b872fdba5dffa38
+.. cbe5c6f0f6b5a054dc36d05b49d1986e
 
 ###########################
 Neural Network Intelligence
docs/source/reference.rst

+:orphan:
+
+.. to be removed
+
 References
 ==================
docs/source/reference/python_api.rst (new file, 0 → 100644)

:orphan:

Python API Reference
====================

.. autosummary::
   :toctree: _modules
   :recursive:

   nni
docs/source/reference_zh.rst

-.. 93a98b510c2b13cb6c1c24d66e914034
+.. 317504c3009932f8a566616e85a9700f
+
+:orphan:
 
 参考
 ==================
docs/templates/autosummary/module.rst (new file, 0 → 100644)

.. Modified from https://raw.githubusercontent.com/sphinx-doc/sphinx/4.x/sphinx/ext/autosummary/templates/autosummary/module.rst

{% if fullname == 'nni' %}
Python API Reference
====================
{% else %}
{{ fullname | escape | underline }}
{% endif %}

.. automodule:: {{ fullname }}
   :noindex:

   {% block attributes %}
   {% if attributes %}
   .. rubric:: {{ _('Module Attributes') }}

   .. autosummary::
   {% for item in attributes %}
      {{ item }}
   {%- endfor %}
   {% endif %}
   {% endblock %}

   {% block functions %}
   {% if functions %}
   .. rubric:: {{ _('Functions') }}

   .. autosummary::
   {% for item in functions %}
      {{ item }}
   {%- endfor %}
   {% endif %}
   {% endblock %}

   {% block classes %}
   {% if classes %}
   .. rubric:: {{ _('Classes') }}

   .. autosummary::
   {% for item in classes %}
      {{ item }}
   {%- endfor %}
   {% endif %}
   {% endblock %}

   {% block exceptions %}
   {% if exceptions %}
   .. rubric:: {{ _('Exceptions') }}

   .. autosummary::
   {% for item in exceptions %}
      {{ item }}
   {%- endfor %}
   {% endif %}
   {% endblock %}

{% block modules %}
{% if modules %}
.. rubric:: Modules

.. autosummary::
   :toctree:
   :recursive:
{% for item in modules %}
   {{ item }}
{%- endfor %}
{% endif %}
{% endblock %}
nni/compression/pytorch/speedup/jit_translate.py

@@ -10,16 +10,31 @@ import torch
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
 
+# to exclude partial
+__all__ = [
+    'adaptive_avgpool_python', 'add_python', 'avgpool2d_python', 'cat_python', 'contiguous_python',
+    'div_python', 'dropout_python', 'exp_python', 'flatten_python', 'floor_div_python', 'gelu_python',
+    'getattr_python', 'jit_to_python_function', 'matmul_python', 'mean_python', 'mul_python',
+    'num2tensor_python', 'parse_constant', 'permute_python', 'relu_inplace_python', 'relu_python',
+    'reshape_python', 'select_python', 'sigmoid_python', 'size_python', 'slice_python', 'softmax_python',
+    'squeeze_python', 'to_python', 'toint_python', 'torch', 'trans_from_jit_to_python', 'translate_list',
+    'transpose2_python', 'transpose_python', 'tupleunpack_python', 'typeas_python', 'unsqueeze_python',
+    'upsample_bilinear2d_python', 'view_python'
+]
+
 def translate_list(list_node, speedup=None):
     """
     Get the list of values from the list construct node.
 
     Parameters
-    ---------
+    ----------
     list_node: Torch.C.Value
         The cpp node of the target list.
     speedup: ModuleSpeed
         The Module speedup module.
 
     Returns
     -------
     values: list

@@ -45,12 +60,14 @@ def translate_list(list_node, speedup=None):
 def parse_constant(cvalue, speedup):
     """
     Parse the constant values from this Node
 
     Parameters
     ----------
     cvalue: Torch.C.Value
         The cpp node of the target constant value.
     speedup: ModelSpeedup
         The Model speedup module.
 
     Returns
     -------
     value: int/float/tensor
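The ``# to exclude partial`` comment explains the purpose of the new ``__all__``: with ``autosummary_imported_members = True`` and ``autosummary_ignore_module_all = False`` (see conf.py above), only names listed in ``__all__`` are picked up for the generated reference pages, so imported helpers such as ``functools.partial`` presumably stay out of the module table. A minimal sketch of the effect, using a hypothetical module:

# mymodule.py -- hypothetical; shows how __all__ limits what autosummary
# (and `from mymodule import *`) will pick up.
from functools import partial

def relu_python(node, speedup=None):
    """A translation function that should appear in the docs."""

relu_inplace = partial(relu_python)  # implementation detail, not documented

__all__ = ['relu_python']  # only this name is exported/documented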
nni/experiment/config/utils/internal.py

@@ -20,6 +20,15 @@ import nni.runtime.config
 from .public import is_missing
 
+__all__ = [
+    'get_base_path', 'set_base_path', 'unset_base_path', 'resolve_path',
+    'case_insensitive', 'camel_case', 'is_instance', 'validate_type',
+    'is_path_like', 'guess_config_type', 'guess_list_config_type',
+    'training_service_config_factory', 'load_training_service_config',
+    'get_ipv4_address'
+]
+
 ## handle relative path ##
 _current_base_path = None
nni/experiment/config/utils/public.py

@@ -10,6 +10,12 @@ import math
 from pathlib import Path
 from typing import Union
 
+__all__ = [
+    'PathLike', 'is_missing',
+    'canonical_gpu_indices', 'validate_gpu_indices',
+    'parse_time', 'parse_memory_size'
+]
+
 PathLike = Union[Path, str]
 
 def is_missing(value):
nni/retiarii/execution/base.py

@@ -17,6 +17,8 @@ _logger = logging.getLogger(__name__)
 class BaseGraphData:
     """
+    Data sent between strategy and trial, in graph-based execution engine.
+
     Attributes
     ----------
     model_script
nni/retiarii/nn/pytorch/cell.py

@@ -32,7 +32,9 @@ _cell_op_factory_type = Callable[[int, int, Optional[int]], nn.Module]
 class Cell(nn.Module):
     """
-    Cell structure :footcite:p:`zoph2017neural,zoph2018learning,liu2018darts` that is popularly used in NAS literature.
+    Cell structure that is popularly used in NAS literature.
+
+    Refer to :footcite:t:`zoph2017neural,zoph2018learning,liu2018darts` for details.
 
     :footcite:t:`radosavovic2019network` is a good summary of how this structure works in practice.
 
     A cell consists of multiple "nodes". Each node is a sum of multiple operators. Each operator is chosen from
nni/retiarii/nn/pytorch/component.py

@@ -100,7 +100,9 @@ class Repeat(Mutable):
 class NasBench201Cell(nn.Module):
     """
-    Cell structure that is proposed in NAS-Bench-201 :footcite:p:`dong2019bench`.
+    Cell structure that is proposed in NAS-Bench-201.
+
+    Refer to :footcite:t:`dong2019bench` for details.
 
     This cell is a densely connected DAG with ``num_tensors`` nodes, where each node is tensor.
     For every i < j, there is an edge from i-th node to j-th node.
nni/retiarii/nn/pytorch/nasbench101.py

@@ -219,7 +219,9 @@ class _NasBench101CellFixed(nn.Module):
 class NasBench101Cell(Mutable):
     """
-    Cell structure that is proposed in NAS-Bench-101 :footcite:p:`ying2019bench`.
+    Cell structure that is proposed in NAS-Bench-101.
+
+    Refer to :footcite:t:`ying2019bench` for details.
 
     This cell is usually used in evaluation of NAS algorithms because there is a "comprehensive analysis" of this search space
     available, which includes a full architecture-dataset that "maps 423k unique architectures to metrics