Unverified commit 7ebc3ad8 authored by Nicolas Hug, committed by GitHub

Improvements to gallery (#7883)

parent 224cbc83
@@ -76,14 +76,44 @@ gen_rst.EXAMPLE_HEADER = """
"""
class CustomGalleryExampleSortKey:
    # See https://sphinx-gallery.github.io/stable/configuration.html#sorting-gallery-examples
    # and https://github.com/sphinx-gallery/sphinx-gallery/blob/master/sphinx_gallery/sorting.py
    def __init__(self, src_dir):
        self.src_dir = src_dir

    transforms_subsection_order = [
        "plot_transforms_getting_started.py",
        "plot_transforms_e2e.py",
        "plot_cutmix_mixup.py",
        "plot_custom_transforms.py",
        "plot_datapoints.py",
        "plot_custom_datapoints.py",
    ]

    def __call__(self, filename):
        if "gallery/transforms" in self.src_dir:
            try:
                return self.transforms_subsection_order.index(filename)
            except ValueError as e:
                raise ValueError(
                    "Looks like you added an example in gallery/transforms? "
                    "You need to specify its order in docs/source/conf.py. Look for CustomGalleryExampleSortKey."
                ) from e
        else:
            # For other subsections we just sort alphabetically by filename
            return filename
sphinx_gallery_conf = {
    "examples_dirs": "../../gallery/",  # path to your example scripts
    "gallery_dirs": "auto_examples",  # path to where to save gallery generated output
-    "subsection_order": ExplicitOrder(["../../gallery/v2_transforms", "../../gallery/others"]),
+    "subsection_order": ExplicitOrder(["../../gallery/transforms", "../../gallery/others"]),
    "backreferences_dir": "gen_modules/backreferences",
    "doc_module": ("torchvision",),
    "remove_config_comments": True,
    "ignore_pattern": "helpers.py",
    "within_subsection_order": CustomGalleryExampleSortKey,
}

napoleon_use_ivar = True
...
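For reference, sphinx-gallery's within_subsection_order hook instantiates the configured class with each subsection's source directory and uses the instance as the key for sorting example filenames. A minimal sketch of how the key above behaves, assuming only the class defined in this diff:

    key = CustomGalleryExampleSortKey("../../gallery/transforms")
    filenames = ["plot_cutmix_mixup.py", "plot_transforms_getting_started.py", "plot_datapoints.py"]
    # Files are ranked by their position in transforms_subsection_order; a file
    # missing from that list raises the ValueError with the hint above.
    print(sorted(filenames, key=key))
    # ['plot_transforms_getting_started.py', 'plot_cutmix_mixup.py', 'plot_datapoints.py']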
@@ -8,7 +8,7 @@ Datapoints
Datapoints are tensor subclasses which the :mod:`~torchvision.transforms.v2` v2 transforms use under the hood to
dispatch their inputs to the appropriate lower-level kernels. Most users do not
need to manipulate datapoints directly and can simply rely on dataset wrapping -
-see e.g. :ref:`sphx_glr_auto_examples_v2_transforms_plot_transforms_v2_e2e.py`.
+see e.g. :ref:`sphx_glr_auto_examples_transforms_plot_transforms_e2e.py`.

.. autosummary::
    :toctree: generated/
...
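As a rough illustration of the dispatch described above (not part of this diff; it assumes the torchvision.datapoints API of this development branch), passing a datapoint to a v2 functional keeps its type and routes the call to the matching kernel:

    import torch
    from torchvision import datapoints
    from torchvision.transforms.v2 import functional as F

    # Wrap a plain tensor as an Image datapoint.
    img = datapoints.Image(torch.randint(0, 256, (3, 32, 32), dtype=torch.uint8))
    out = F.resize(img, size=[16, 16], antialias=True)
    print(type(out))  # still datapoints.Image: the input type selected the image kernel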
@@ -113,7 +113,7 @@ do to is to update the import to ``torchvision.transforms.v2``. In terms of
output, there might be negligible differences due to implementation differences.
To learn more about the v2 transforms, check out
-:ref:`sphx_glr_auto_examples_v2_transforms_plot_transforms_v2.py`.
+:ref:`sphx_glr_auto_examples_transforms_plot_transforms_getting_started.py`.

.. TODO: make sure link is still good!!

@@ -479,7 +479,7 @@ CutMix and MixUp are special transforms that
are meant to be used on batches rather than on individual images, because they
are combining pairs of images together. These can be used after the dataloader
(once the samples are batched), or part of a collation function. See
-:ref:`sphx_glr_auto_examples_v2_transforms_plot_cutmix_mixup.py` for detailed usage examples.
+:ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage examples.

.. autosummary::
    :toctree: generated/
...
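A minimal sketch of the batch-level usage referenced above; the batch shapes and NUM_CLASSES are placeholders, not something this diff defines:

    import torch
    from torchvision.transforms import v2

    NUM_CLASSES = 10  # placeholder
    cutmix_or_mixup = v2.RandomChoice([v2.CutMix(num_classes=NUM_CLASSES), v2.MixUp(num_classes=NUM_CLASSES)])

    images = torch.rand(4, 3, 224, 224)           # a batch coming out of the dataloader
    labels = torch.randint(0, NUM_CLASSES, (4,))  # integer class labels
    images, labels = cutmix_or_mixup(images, labels)
    print(labels.shape)  # labels are now soft targets of shape (4, NUM_CLASSES)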
.. _transforms_gallery:

-V2 transforms
--------------
+Transforms
+----------
@@ -5,12 +5,12 @@ How to write your own Datapoint class
.. note::
    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_custom_datapoints.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_custom_datapoints.py>` to download the full example code.
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_custom_datapoints.py>` to download the full example code.

This guide is intended for advanced users and downstream library maintainers. We explain how to
write your own datapoint class, and how to make it compatible with the built-in
Torchvision v2 transforms. Before continuing, make sure you have read
-:ref:`sphx_glr_auto_examples_v2_transforms_plot_datapoints.py`.
+:ref:`sphx_glr_auto_examples_transforms_plot_datapoints.py`.
"""

# %%
...
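A bare-bones sketch of such a subclass (the class name is hypothetical; the linked example covers construction and transform support in detail):

    import torch
    from torchvision import datapoints

    class MyDatapoint(datapoints.Datapoint):
        # Subclassing is enough to get a new datapoint type; making v2 transforms
        # act on it requires registering kernels (see register_kernel further down).
        pass

    my_dp = torch.rand(3, 8, 8).as_subclass(MyDatapoint)
    print(type(my_dp), my_dp.shape)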
@@ -5,7 +5,7 @@ How to write your own v2 transforms
.. note::
    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_custom_transforms.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_custom_transforms.py>` to download the full example code.
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_custom_transforms.py>` to download the full example code.

This guide explains how to write transforms that are compatible with the
torchvision transforms V2 API.
...
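In its simplest form, a v2-compatible transform can be a plain torch.nn.Module whose forward accepts and returns the sample structure it is given. The sketch below is illustrative only (it assumes tensor images) and is not taken from this diff:

    import torch

    class MyCustomTransform(torch.nn.Module):
        def forward(self, img, label):
            # Arbitrary per-sample logic; return outputs in the same structure.
            return img.flip(-1), label

    out_img, out_label = MyCustomTransform()(torch.rand(3, 8, 8), 0)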
@@ -6,7 +6,7 @@ How to use CutMix and MixUp
.. note::
    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_cutmix_mixup.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_cutmix_mixup.py>` to download the full example code.
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_cutmix_mixup.py>` to download the full example code.

:class:`~torchvision.transforms.v2.CutMix` and
:class:`~torchvision.transforms.v2.MixUp` are popular augmentation strategies
...
@@ -5,7 +5,7 @@ Datapoints FAQ
.. note::
    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_datapoints.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_datapoints.py>` to download the full example code.
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_datapoints.py>` to download the full example code.

Datapoints are Tensor subclasses introduced together with
...
@@ -4,8 +4,8 @@ Transforms v2: End-to-end object detection/segmentation example
===============================================================
.. note::
-    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_transforms_v2_e2e.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_transforms_v2_e2e.py>` to download the full example code.
+    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_transforms_e2e.ipynb>`_
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_transforms_e2e.py>` to download the full example code.

Object detection and segmentation tasks are natively supported:
``torchvision.transforms.v2`` enables jointly transforming images, videos,
...
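For context, a small sketch (not from this diff) of the joint transformation this example demonstrates, assuming the target dict would hold box and mask datapoints after dataset wrapping:

    import torch
    from torchvision import datapoints
    from torchvision.transforms import v2

    img = datapoints.Image(torch.randint(0, 256, (3, 64, 64), dtype=torch.uint8))
    target = {"label": 3}  # box and mask datapoints would live here after wrapping

    transforms = v2.Compose([
        v2.RandomPhotometricDistort(p=1),
        v2.RandomHorizontalFlip(p=1),
    ])
    # Any datapoints inside `target` are transformed consistently with the image;
    # plain entries such as the label pass through untouched.
    img, target = transforms(img, target)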
@@ -4,8 +4,8 @@ Getting started with transforms v2
==================================
.. note::
-    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_transforms_v2.ipynb>`_
-    or :ref:`go to the end <sphx_glr_download_auto_examples_v2_transforms_plot_transforms_v2.py>` to download the full example code.
+    Try on `collab <https://colab.research.google.com/github/pytorch/vision/blob/gh-pages/main/_generated_ipynb_notebooks/plot_transforms_getting_started.ipynb>`_
+    or :ref:`go to the end <sphx_glr_download_auto_examples_transforms_plot_transforms_getting_started.py>` to download the full example code.

This example illustrates all of what you need to know to get started with the
new :mod:`torchvision.transforms.v2` API. We'll cover simple tasks like

@@ -70,7 +70,7 @@ plot([img, out])
# <transforms>` to learn more about recommended practices and conventions, or
# explore more :ref:`examples <transforms_gallery>` e.g. how to use augmentation
# transforms like :ref:`CutMix and MixUp
-# <sphx_glr_auto_examples_v2_transforms_plot_cutmix_mixup.py>`.
+# <sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py>`.
#
# .. note::
#

@@ -148,7 +148,7 @@ print(f"{img_dp.dtype = }, {img_dp.shape = }, {img_dp.sum() = }")
#
# You don't need to know much more about datapoints at this point, but advanced
# users who want to learn more can refer to
-# :ref:`sphx_glr_auto_examples_v2_transforms_plot_datapoints.py`.
+# :ref:`sphx_glr_auto_examples_transforms_plot_datapoints.py`.
#
# What do I pass as input?
# ------------------------

@@ -243,7 +243,7 @@ print(f"{out_target['this_is_ignored']}")
#
# from torchvision.datasets import CocoDetection, wrap_dataset_for_transforms_v2
#
-# dataset = CocoDetection(..., transforms=my_v2_transforms)
+# dataset = CocoDetection(..., transforms=my_transforms)
# dataset = wrap_dataset_for_transforms_v2(dataset)
# # Now the dataset returns datapoints!
#
...
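Spelled out a little more than the snippet above; the dataset paths and the transform pipeline here are hypothetical placeholders, and the elided arguments in the diff stay unspecified there:

    from torch.utils.data import DataLoader
    from torchvision.datasets import CocoDetection, wrap_dataset_for_transforms_v2
    from torchvision.transforms import v2

    my_transforms = v2.RandomHorizontalFlip(p=0.5)  # placeholder pipeline
    dataset = CocoDetection("path/to/images", "path/to/annotations.json", transforms=my_transforms)
    dataset = wrap_dataset_for_transforms_v2(dataset)  # targets now contain datapoints
    loader = DataLoader(dataset, batch_size=2, collate_fn=lambda batch: tuple(zip(*batch)))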
@@ -17,7 +17,7 @@ class Datapoint(torch.Tensor):
    You probably don't want to use this class unless you're defining your own
    custom Datapoints. See
-    :ref:`sphx_glr_auto_examples_v2_transforms_plot_custom_datapoints.py` for details.
+    :ref:`sphx_glr_auto_examples_transforms_plot_custom_datapoints.py` for details.
    """

    @staticmethod
...
@@ -216,7 +216,7 @@ class MixUp(_BaseMixUpCutMix):
    .. note::
        This transform is meant to be used on **batches** of samples, not
        individual images. See
-        :ref:`sphx_glr_auto_examples_v2_transforms_plot_cutmix_mixup.py` for detailed usage
+        :ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage
        examples.

    The sample pairing is deterministic and done by matching consecutive
    samples in the batch, so the batch needs to be shuffled (this is an

@@ -266,7 +266,7 @@ class CutMix(_BaseMixUpCutMix):
    .. note::
        This transform is meant to be used on **batches** of samples, not
        individual images. See
-        :ref:`sphx_glr_auto_examples_v2_transforms_plot_cutmix_mixup.py` for detailed usage
+        :ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage
        examples.

    The sample pairing is deterministic and done by matching consecutive
    samples in the batch, so the batch needs to be shuffled (this is an
...
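A sketch of the shuffled-batch setup these notes call for, with CutMix applied in a collation function; the dataset and NUM_CLASSES here are placeholders:

    import torch
    from torch.utils.data import DataLoader, default_collate
    from torchvision.transforms import v2

    NUM_CLASSES = 10  # placeholder
    cutmix = v2.CutMix(num_classes=NUM_CLASSES)

    # A tiny in-memory stand-in for a real (image, label) dataset.
    dataset = [(torch.rand(3, 224, 224), torch.tensor(i % NUM_CLASSES)) for i in range(64)]

    def collate_fn(batch):
        # Apply CutMix to the already-collated batch; shuffle=True below makes the
        # deterministic consecutive-sample pairing differ between epochs.
        return cutmix(*default_collate(batch))

    loader = DataLoader(dataset, batch_size=32, shuffle=True, collate_fn=collate_fn)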
@@ -69,7 +69,7 @@ _BUILTIN_DATAPOINT_TYPES = {
def register_kernel(functional, datapoint_cls):
    """[BETA] Decorate a kernel to register it for a functional and a (custom) datapoint type.

-    See :ref:`sphx_glr_auto_examples_v2_transforms_plot_custom_datapoints.py` for usage
+    See :ref:`sphx_glr_auto_examples_transforms_plot_custom_datapoints.py` for usage
    details.
    """
    if isinstance(functional, str):
...
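A rough sketch of the registration pattern for a hypothetical custom datapoint; the kernel simply unwraps to a plain tensor, reuses the tensor path, and re-wraps. Treat the exact decorator call as an assumption based on the signature shown above, and see the linked example for the authoritative version:

    import torch
    from torchvision import datapoints
    from torchvision.transforms.v2 import functional as F

    class MyDatapoint(datapoints.Datapoint):
        pass

    @F.register_kernel(F.resize, MyDatapoint)
    def resize_my_datapoint(my_dp, size, **kwargs):
        # Delegate to the plain-tensor kernel, then restore the custom type.
        out = F.resize(my_dp.as_subclass(torch.Tensor), size, **kwargs)
        return out.as_subclass(MyDatapoint)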