Commit cc26cd81 authored by panning

merge v0.16.0

parents f78f29f5 fbb4cc54
#!/usr/bin/env bash
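# Commits freshly built Sphinx HTML docs to the gh-pages branch of pytorch/vision:
# <src> is the root of the built-documentation git checkout, and <target branch> is the
# folder on gh-pages (e.g. "main" or "1.7") that the rendered docs are copied into.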
set -ex
if [ "$2" == "" ]; then
echo call as "$0" "<src>" "<target branch>"
echo where src is the root of the built documentation git checkout and
echo branch should be "main" or "1.7" or so
exit 1
fi
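# Illustrative invocation: the upload_docs job in .circleci/config.yml below calls this
# script roughly as follows, with the target derived from the release tag (or "main"):
#   .circleci/build_docs/commit_docs.sh ~/workspace main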
src=$1
target=$2
echo "committing docs from ${src} to ${target}"
pushd "${src}"
git checkout gh-pages
mkdir -p ./"${target}"
rm -rf ./"${target}"/*
cp -r "${src}/docs/build/html/"* ./"$target"
if [ "${target}" == "main" ]; then
mkdir -p ./_static
rm -rf ./_static/*
cp -r "${src}/docs/build/html/_static/"* ./_static
git add --all ./_static || true
fi
git add --all ./"${target}" || true
git config user.email "soumith+bot@pytorch.org"
git config user.name "pytorchbot"
# If there aren't changes, don't make a commit; push is no-op
git commit -m "auto-generating sphinx docs" || true
git remote add https https://github.com/pytorch/vision.git
git push -u https gh-pages
version: 2.1
# How to test the Linux jobs:
# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/
# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_linux_wheel_py3.7
# - Replace binary_linux_wheel_py3.7 with the name of the job you want to test.
# Job names are the 'name:' keys.
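# For example, to run the CPU wheel build for Python 3.7 locally (job name taken from the
# 'build' workflow below; substitute any other 'name:' value in the same way):
# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_linux_wheel_py3.7_cpu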
executors:
windows-cpu:
machine:
resource_class: windows.xlarge
image: windows-server-2019-vs2019:stable
shell: bash.exe
windows-gpu:
machine:
resource_class: windows.gpu.nvidia.medium
image: windows-server-2019-nvidia:stable
shell: bash.exe
commands:
checkout_merge:
description: "checkout merge branch"
steps:
- checkout
# - run:
# name: Checkout merge branch
# command: |
# set -ex
# BRANCH=$(git rev-parse --abbrev-ref HEAD)
# if [[ "$BRANCH" != "main" ]]; then
# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH}
# git checkout "merged/$CIRCLE_BRANCH"
# fi
designate_upload_channel:
description: "inserts the correct upload channel into ${BASH_ENV}"
steps:
- run:
name: adding UPLOAD_CHANNEL to BASH_ENV
command: |
our_upload_channel=test
echo "export UPLOAD_CHANNEL=${our_upload_channel}" >> ${BASH_ENV}
brew_update:
description: "Update Homebrew and install base formulae"
steps:
- run:
name: Update Homebrew
no_output_timeout: "10m"
command: |
set -ex
# Update repositories manually.
# Running `brew update` produces a comparison between the
# current checkout and the updated checkout, which takes a
# very long time because the existing checkout is 2y old.
for path in $(find /usr/local/Homebrew -type d -name .git)
do
cd $path/..
git fetch --depth=1 origin
git reset --hard origin/master
done
export HOMEBREW_NO_AUTO_UPDATE=1
# Install expect and moreutils so that we can call `unbuffer` and `ts`.
# moreutils installs a `parallel` executable by default, which conflicts
# with the executable from GNU `parallel`, so we must unlink GNU
# `parallel` first and relink it afterwards.
brew install coreutils
brew unlink parallel
brew install moreutils
brew link parallel --overwrite
brew install expect
brew_install:
description: "Install Homebrew formulae"
parameters:
formulae:
type: string
default: ""
steps:
- run:
name: Install << parameters.formulae >>
no_output_timeout: "10m"
command: |
set -ex
export HOMEBREW_NO_AUTO_UPDATE=1
brew install << parameters.formulae >>
run_brew_for_ios_build:
steps:
- brew_update
- brew_install:
formulae: libtool
apt_install:
parameters:
args:
type: string
descr:
type: string
default: ""
update:
type: boolean
default: true
steps:
- run:
name: >
<<^ parameters.descr >> apt install << parameters.args >> <</ parameters.descr >>
<<# parameters.descr >> << parameters.descr >> <</ parameters.descr >>
command: |
<<# parameters.update >> sudo apt update -qy <</ parameters.update >>
sudo apt install << parameters.args >>
pip_install:
parameters:
args:
type: string
descr:
type: string
default: ""
user:
type: boolean
default: true
steps:
- run:
name: >
<<^ parameters.descr >> pip install << parameters.args >> <</ parameters.descr >>
<<# parameters.descr >> << parameters.descr >> <</ parameters.descr >>
command: >
pip install
<<# parameters.user >> --user <</ parameters.user >>
--progress-bar=off
<< parameters.args >>
install_torchvision:
parameters:
editable:
type: boolean
default: true
steps:
- pip_install:
args: --pre torch -f https://download.pytorch.org/whl/test/cpu/torch_test.html
descr: Install PyTorch from nightly releases
- pip_install:
args: --no-build-isolation <<# parameters.editable >> --editable <</ parameters.editable >> .
descr: Install torchvision <<# parameters.editable >> in editable mode <</ parameters.editable >>
# Most of the test suite is handled by the `unittest` jobs, which have a completely different workflow and setup.
# This command can be used when only a selection of tests needs to be run, e.g. for ad-hoc files (see the commented usage sketch after this command).
run_tests_selective:
parameters:
file_or_dir:
type: string
steps:
- run:
name: Install test utilities
command: pip install --progress-bar=off pytest pytest-mock
- run:
name: Run tests
command: pytest --junitxml=test-results/junit.xml -v --durations 20 <<parameters.file_or_dir>>
- store_test_results:
path: test-results
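# A minimal usage sketch (hypothetical job shown for illustration; the real unittest_torchhub
# and unittest_onnx jobs below follow this exact pattern):
#
#   some_selective_test_job:
#     docker:
#       - image: cimg/python:3.7
#     steps:
#       - checkout
#       - install_torchvision
#       - run_tests_selective:
#           file_or_dir: test/test_hub.py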
download_model_weights:
parameters:
extract_roots:
type: string
default: "torchvision/models"
background:
type: boolean
default: true
steps:
- apt_install:
args: parallel wget
descr: Install download utilities
- run:
name: Download model weights
background: << parameters.background >>
command: |
mkdir -p ~/.cache/torch/hub/checkpoints
python scripts/collect_model_urls.py << parameters.extract_roots >> \
| parallel -j0 'wget --no-verbose -O ~/.cache/torch/hub/checkpoints/`basename {}` {}\?source=ci'
binary_common: &binary_common
parameters:
# Edit these defaults to do a release
build_version:
description: "version number of release binary; by default, build a nightly"
type: string
default: "0.14.1"
pytorch_version:
description: "PyTorch version to build against; by default, use a nightly"
type: string
default: "1.13.1"
# Don't edit these
python_version:
description: "Python version to build against (e.g., 3.7)"
type: string
cu_version:
description: "CUDA version to build against, in CU format (e.g., cpu or cu100)"
type: string
default: "cpu"
unicode_abi:
description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)"
type: string
default: ""
wheel_docker_image:
description: "Wheel only: what docker image to use"
type: string
default: ""
conda_docker_image:
description: "Conda only: what docker image to use"
type: string
default: "pytorch/conda-builder:cpu"
environment:
PYTHON_VERSION: << parameters.python_version >>
PYTORCH_VERSION: << parameters.pytorch_version >>
UNICODE_ABI: << parameters.unicode_abi >>
CU_VERSION: << parameters.cu_version >>
MACOSX_DEPLOYMENT_TARGET: 10.9
torchvision_ios_params: &torchvision_ios_params
parameters:
build_environment:
type: string
default: ""
ios_arch:
type: string
default: ""
ios_platform:
type: string
default: ""
environment:
BUILD_ENVIRONMENT: << parameters.build_environment >>
IOS_ARCH: << parameters.ios_arch >>
IOS_PLATFORM: << parameters.ios_platform >>
torchvision_android_params: &torchvision_android_params
parameters:
build_environment:
type: string
default: ""
environment:
BUILD_ENVIRONMENT: << parameters.build_environment >>
smoke_test_common: &smoke_test_common
<<: *binary_common
docker:
- image: torchvision/smoke_test:latest
jobs:
circleci_consistency:
docker:
- image: cimg/python:3.7
steps:
- checkout
- pip_install:
args: jinja2 pyyaml
- run:
name: Check CircleCI config consistency
command: |
python .circleci/regenerate.py
git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1)
lint_python_and_config:
docker:
- image: cimg/python:3.7
steps:
- checkout
- pip_install:
args: pre-commit
descr: Install lint utilities
- run:
name: Install pre-commit hooks
command: pre-commit install-hooks
- run:
name: Lint Python code and config files
command: pre-commit run --all-files
- run:
name: Required lint modifications
when: on_fail
command: git --no-pager diff
lint_c:
docker:
- image: cimg/python:3.7
steps:
- apt_install:
args: libtinfo5
descr: Install additional system libraries
- checkout
- run:
name: Install lint utilities
command: |
curl https://oss-clang-format.s3.us-east-2.amazonaws.com/linux64/clang-format-linux64 -o clang-format
chmod +x clang-format
sudo mv clang-format /opt/clang-format
- run:
name: Lint C code
command: ./.circleci/unittest/linux/scripts/run-clang-format.py -r torchvision/csrc --clang-format-executable /opt/clang-format
- run:
name: Required lint modifications
when: on_fail
command: git --no-pager diff
type_check_python:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision:
editable: true
- pip_install:
args: mypy
descr: Install Python type check utilities
- run:
name: Check Python types statically
command: mypy --install-types --non-interactive --config-file mypy.ini
unittest_torchhub:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision
- run_tests_selective:
file_or_dir: test/test_hub.py
unittest_onnx:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision
- pip_install:
args: onnx onnxruntime
descr: Install ONNX
- run_tests_selective:
file_or_dir: test/test_onnx.py
unittest_extended:
docker:
- image: cimg/python:3.7
resource_class: xlarge
steps:
- checkout
- download_model_weights
- install_torchvision
- run:
name: Enable extended tests
command: echo 'export PYTORCH_TEST_WITH_EXTENDED=1' >> $BASH_ENV
- run_tests_selective:
file_or_dir: test/test_extended_*.py
binary_linux_wheel:
<<: *binary_common
docker:
- image: << parameters.wheel_docker_image >>
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build wheel packages
no_output_timeout: 30m
command: |
set -ex
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
binary_linux_conda:
<<: *binary_common
docker:
- image: "<< parameters.conda_docker_image >>"
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build conda packages
no_output_timeout: 30m
command: |
set -ex
packaging/build_conda.sh
- store_artifacts:
path: /opt/conda/conda-bld/linux-64
- persist_to_workspace:
root: /opt/conda/conda-bld/linux-64
paths:
- "*"
- store_test_results:
path: build_results/
binary_win_conda:
<<: *binary_common
executor: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build conda packages
no_output_timeout: 30m
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate base
conda install -yq conda-build "conda-package-handling!=1.5.0"
packaging/build_conda.sh
rm /C/tools/miniconda3/conda-bld/win-64/vs${VC_YEAR}*.tar.bz2
- store_artifacts:
path: C:/tools/miniconda3/conda-bld/win-64
- persist_to_workspace:
root: C:/tools/miniconda3/conda-bld/win-64
paths:
- "*"
- store_test_results:
path: build_results/
binary_win_wheel:
<<: *binary_common
executor: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build wheel packages
no_output_timeout: 30m
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
- store_test_results:
path: build_results/
binary_macos_wheel:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
# Cannot easily deduplicate this, as sourcing `activate`
# sets environment variables that we need to propagate
# to build_wheel.sh
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
binary_ios_build:
<<: *torchvision_ios_params
macos:
xcode: "14.0"
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run_brew_for_ios_build
- run:
name: Build
no_output_timeout: "1h"
command: |
script="/Users/distiller/project/.circleci/unittest/ios/scripts/binary_ios_build.sh"
cat "$script"
source "$script"
- persist_to_workspace:
root: /Users/distiller/workspace/
paths: ios
binary_ios_upload:
<<: *torchvision_ios_params
macos:
xcode: "14.0"
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run_brew_for_ios_build
- run:
name: Upload
no_output_timeout: "1h"
command: |
script="/Users/distiller/project/.circleci/unittest/ios/scripts/binary_ios_upload.sh"
cat "$script"
source "$script"
binary_android_build:
<<: *torchvision_android_params
docker:
- image: cimg/android:2021.08-ndk
resource_class: xlarge
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run:
name: Build
no_output_timeout: "1h"
command: |
script="/home/circleci/project/.circleci/unittest/android/scripts/binary_android_build.sh"
cat "$script"
source "$script"
- store_artifacts:
path: ~/workspace/artifacts
binary_android_upload:
<<: *torchvision_android_params
docker:
- image: cimg/android:2021.08-ndk
resource_class: xlarge
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run:
name: Upload
no_output_timeout: "1h"
command: |
script="/home/circleci/project/.circleci/unittest/android/scripts/binary_android_upload.sh"
cat "$script"
source "$script"
binary_macos_conda:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
conda install -yq conda-build
packaging/build_conda.sh
- store_artifacts:
path: /Users/distiller/miniconda3/conda-bld/osx-64
- persist_to_workspace:
root: /Users/distiller/miniconda3/conda-bld/osx-64
paths:
- "*"
- store_test_results:
path: build_results/
# Requires org-member context
binary_conda_upload:
docker:
- image: continuumio/miniconda
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
command: |
# Prevent credential from leaking
conda install -yq anaconda-client
set -x
anaconda -t "${CONDA_PYTORCHBOT_TOKEN}" upload ~/workspace/*.tar.bz2 -u "pytorch-${UPLOAD_CHANNEL}" --label main --no-progress --force
# Requires org-member context
binary_wheel_upload:
parameters:
subfolder:
description: "What whl subfolder to upload to, e.g., blank or cu100/ (trailing slash is important)"
type: string
docker:
- image: cimg/python:3.7
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- checkout
- pip_install:
args: awscli
- run:
command: |
export PATH="$HOME/.local/bin:$PATH"
# Prevent credential from leaking
set +x
export AWS_ACCESS_KEY_ID="${PYTORCH_BINARY_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${PYTORCH_BINARY_AWS_SECRET_ACCESS_KEY}"
set -x
for pkg in ~/workspace/*.whl; do
aws s3 cp "$pkg" "s3://pytorch/whl/${UPLOAD_CHANNEL}/<< parameters.subfolder >>" --acl public-read
done
smoke_test_linux_conda:
<<: *smoke_test_common
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
conda install -v -y -c pytorch-nightly pytorch
conda install -v -y $(ls ~/workspace/torchvision*.tar.bz2)
- run:
name: smoke test
command: |
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_linux_pip:
<<: *smoke_test_common
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
- pip_install:
args: $(ls ~/workspace/torchvision*.whl) --pre -f https://download.pytorch.org/whl/test/torch_test.html
- run:
name: smoke test
command: |
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_docker_image_build:
machine:
image: ubuntu-2004:202104-01
resource_class: large
environment:
image_name: torchvision/smoke_test
steps:
- checkout
- designate_upload_channel
- run:
name: Build and push Docker image
no_output_timeout: "1h"
command: |
set +x
echo "${DOCKER_HUB_TOKEN}" | docker login --username "${DOCKER_HUB_USERNAME}" --password-stdin
set -x
cd .circleci/smoke_test/docker && docker build . -t ${image_name}:${CIRCLE_WORKFLOW_ID}
docker tag ${image_name}:${CIRCLE_WORKFLOW_ID} ${image_name}:latest
docker push ${image_name}:${CIRCLE_WORKFLOW_ID}
docker push ${image_name}:latest
smoke_test_win_conda:
<<: *binary_common
executor:
name: windows-cpu
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda env remove -n python${PYTHON_VERSION} || true
conda create -yn python${PYTHON_VERSION} python=${PYTHON_VERSION}
conda activate python${PYTHON_VERSION}
conda install -v -y -c pytorch-nightly pytorch
conda install -v -y $(ls ~/workspace/torchvision*.tar.bz2)
- run:
name: smoke test
command: |
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_win_pip:
<<: *binary_common
executor:
name: windows-cpu
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda create -yn python${PYTHON_VERSION} python=${PYTHON_VERSION}
conda activate python${PYTHON_VERSION}
- pip_install:
args: $(ls ~/workspace/torchvision*.whl) --pre -f https://download.pytorch.org/whl/test/torch_test.html
- run:
name: smoke test
command: |
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate python${PYTHON_VERSION}
python -c "import torchvision"
unittest_linux_cpu:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cpu"
resource_class: 2xlarge+
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
# This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
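# `date +"%Y-%U"` prints the year and (Sunday-based) week number, e.g. "2022-43", so the
# checksum of .circleci-weekly below, and hence the cache key, rolls over once per week.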
- restore_cache:
keys:
- env-v2-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
key: env-v2-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_linux_gpu:
<<: *binary_common
machine:
image: ubuntu-2004-cuda-11.4:202110-01
resource_class: gpu.nvidia.medium
environment:
image_name: "pytorch/manylinux-cuda116"
CU_VERSION: << parameters.cu_version >>
PYTHON_VERSION: << parameters.python_version >>
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
# This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
keys:
- env-v3-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
- run:
name: Setup
command: docker run -e PYTHON_VERSION -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
key: env-v3-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
paths:
- conda
- env
- run:
# Here we create an envlist file that contains some env variables that we want the docker container to be aware of.
# Normally, the CIRCLECI variable is set and available on all CI workflows: https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables.
# They're available in all the other workflows (OSX and Windows).
# But here, we're running the unittest_linux_gpu workflows in a docker container, where those variables aren't accessible.
# So instead we dump the variables we need in env.list and we pass that file when invoking "docker run".
name: export CIRCLECI env var
command: echo "CIRCLECI=true" >> ./env.list
- run:
name: Install torchvision
command: docker run -t --gpus all -v $PWD:$PWD -w $PWD -e UPLOAD_CHANNEL -e CU_VERSION "${image_name}" .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: docker run --env-file ./env.list -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post Process
command: docker run -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_windows_cpu:
<<: *binary_common
executor:
name: windows-cpu
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
# This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
keys:
- env-v2-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
- run:
name: Setup
command: .circleci/unittest/windows/scripts/setup_env.sh
- save_cache:
key: env-v2-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/windows/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/windows/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/windows/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_windows_gpu:
<<: *binary_common
executor:
name: windows-gpu
environment:
CUDA_VERSION: "11.6"
PYTHON_VERSION: << parameters.python_version >>
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
# This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
keys:
- env-v1-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
- run:
name: Setup
command: .circleci/unittest/windows/scripts/setup_env.sh
- save_cache:
key: env-v1-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
paths:
- conda
- env
- run:
name: Install CUDA
command: packaging/windows/internal/cuda_install.bat
- run:
name: Update CUDA driver
command: packaging/windows/internal/driver_update.bat
- run:
name: Install torchvision
command: .circleci/unittest/windows/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/windows/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/windows/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_macos_cpu:
<<: *binary_common
macos:
xcode: "14.0"
resource_class: large
steps:
- checkout
- designate_upload_channel
- run:
name: Install wget
command: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget
# Disable brew auto update, which is very slow
- run:
name: Generate cache key
# This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
keys:
- env-v3-macos-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
key: env-v3-macos-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
cmake_linux_cpu:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cpu"
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Setup conda
command: .circleci/unittest/linux/scripts/setup_env.sh
- run: packaging/build_cmake.sh
cmake_linux_gpu:
<<: *binary_common
machine:
image: ubuntu-2004-cuda-11.4:202110-01
resource_class: gpu.nvidia.small
environment:
PYTHON_VERSION: << parameters.python_version >>
PYTORCH_VERSION: << parameters.pytorch_version >>
UNICODE_ABI: << parameters.unicode_abi >>
CU_VERSION: << parameters.cu_version >>
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Setup conda
command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> .circleci/unittest/linux/scripts/setup_env.sh
- run:
name: Build torchvision C++ distribution and test
no_output_timeout: 30m
command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -e UPLOAD_CHANNEL -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> packaging/build_cmake.sh
cmake_macos_cpu:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
conda install -yq conda-build cmake
packaging/build_cmake.sh
cmake_windows_cpu:
<<: *binary_common
executor:
name: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/build_cmake.sh
cmake_windows_gpu:
<<: *binary_common
executor:
name: windows-gpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Update CUDA driver
command: packaging/windows/internal/driver_update.bat
- run:
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
packaging/build_cmake.sh
build_docs:
<<: *binary_common
docker:
- image: cimg/python:3.7
resource_class: 2xlarge+
steps:
- attach_workspace:
at: ~/workspace
- checkout
- download_model_weights
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- designate_upload_channel
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Build docs
command: |
set -ex
# turn v1.12.0rc3 into 1.12.0
tag=$(echo $CIRCLE_TAG | sed -e 's/v*\([0-9.]*\).*/\1/')
VERSION=${tag:-main}
eval "$(./conda/bin/conda shell.bash hook)"
conda activate ./env
pushd docs
pip install --progress-bar=off -r requirements.txt
make html
popd
- persist_to_workspace:
root: ./
paths:
- "*"
- store_artifacts:
path: ./docs/build/html
destination: docs
upload_docs:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cuda100"
resource_class: 2xlarge+
steps:
- attach_workspace:
at: ~/workspace
- run:
name: Generate netrc
command: |
# set credentials for https pushing
# requires the org-member context
cat > ~/.netrc \<<DONE
machine github.com
login pytorchbot
password ${GITHUB_PYTORCHBOT_TOKEN}
DONE
- run:
name: Upload docs
command: |
# Don't use "checkout" step since it uses ssh, which cannot git push
# https://circleci.com/docs/2.0/configuration-reference/#checkout
set -ex
# Change v1.12.1rc1 into 1.12 (only major.minor)
tag=$(echo $CIRCLE_TAG | sed -e 's/v*\([0-9]*\.[0-9]*\).*/\1/')
target=${tag:-main}
~/workspace/.circleci/build_docs/commit_docs.sh ~/workspace $target
workflows:
lint:
jobs:
- circleci_consistency
- lint_python_and_config
- lint_c
- type_check_python
build:
jobs:
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: /.*/
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_linux_wheel_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_wheel_py3.7_cu116
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_wheel_py3.7_cu117
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_wheel:
cu_version: rocm5.1.1
name: binary_linux_wheel_py3.7_rocm5.1.1
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_linux_wheel:
cu_version: rocm5.2
name: binary_linux_wheel_py3.7_rocm5.2
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_wheel_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_wheel_py3.8_cu116
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_wheel_py3.8_cu117
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_wheel:
cu_version: rocm5.1.1
name: binary_linux_wheel_py3.8_rocm5.1.1
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_linux_wheel:
cu_version: rocm5.2
name: binary_linux_wheel_py3.8_rocm5.2
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_wheel_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_wheel_py3.9_cu116
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_wheel_py3.9_cu117
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_wheel:
cu_version: rocm5.1.1
name: binary_linux_wheel_py3.9_rocm5.1.1
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_linux_wheel:
cu_version: rocm5.2
name: binary_linux_wheel_py3.9_rocm5.2
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_wheel_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_wheel_py3.10_cu116
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_wheel_py3.10_cu117
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_wheel:
cu_version: rocm5.1.1
name: binary_linux_wheel_py3.10_rocm5.1.1
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_linux_wheel:
cu_version: rocm5.2
name: binary_linux_wheel_py3.10_rocm5.2
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_wheel_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_wheel_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_wheel_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_wheel_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.7_cpu
python_version: '3.7'
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.7_cu116
python_version: '3.7'
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.7_cu117
python_version: '3.7'
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.8_cpu
python_version: '3.8'
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.8_cu116
python_version: '3.8'
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.8_cu117
python_version: '3.8'
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.9_cpu
python_version: '3.9'
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.9_cu116
python_version: '3.9'
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.9_cu117
python_version: '3.9'
- binary_win_wheel:
cu_version: cpu
name: binary_win_wheel_py3.10_cpu
python_version: '3.10'
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_wheel_py3.10_cu116
python_version: '3.10'
- binary_win_wheel:
cu_version: cu117
name: binary_win_wheel_py3.10_cu117
python_version: '3.10'
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_conda_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_conda_py3.7_cu116
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_conda_py3.7_cu117
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_conda_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_conda_py3.8_cu116
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_conda_py3.8_cu117
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_conda_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_conda_py3.9_cu116
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_conda_py3.9_cu117
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_linux_conda_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
name: binary_linux_conda_py3.10_cu116
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
name: binary_linux_conda_py3.10_cu117
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_conda_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_conda_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_conda_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
name: binary_macos_conda_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.7_cpu
python_version: '3.7'
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.7_cu116
python_version: '3.7'
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.7_cu117
python_version: '3.7'
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.8_cpu
python_version: '3.8'
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.8_cu116
python_version: '3.8'
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.8_cu117
python_version: '3.8'
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.9_cpu
python_version: '3.9'
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.9_cu116
python_version: '3.9'
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.9_cu117
python_version: '3.9'
- binary_win_conda:
cu_version: cpu
name: binary_win_conda_py3.10_cpu
python_version: '3.10'
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: main
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: binary_win_conda_py3.10_cu116
python_version: '3.10'
- binary_win_conda:
cu_version: cu117
name: binary_win_conda_py3.10_cu117
python_version: '3.10'
- build_docs:
filters:
branches:
only:
- /.*/
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: build_docs
python_version: '3.7'
requires:
- binary_linux_wheel_py3.7_cpu
- upload_docs:
context: org-member
filters:
branches:
only:
- nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: upload_docs
python_version: '3.7'
requires:
- build_docs
- binary_ios_build:
build_environment: binary-libtorchvision_ops-ios-12.0.0-x86_64
ios_arch: x86_64
ios_platform: SIMULATOR
name: binary_libtorchvision_ops_ios_12.0.0_x86_64
- binary_ios_build:
build_environment: binary-libtorchvision_ops-ios-12.0.0-arm64
ios_arch: arm64
ios_platform: OS
name: binary_libtorchvision_ops_ios_12.0.0_arm64
- binary_android_build:
build_environment: binary-libtorchvision_ops-android
name: binary_libtorchvision_ops_android
unittest:
jobs:
- unittest_torchhub
- unittest_onnx
- unittest_extended
- unittest_linux_cpu:
cu_version: cpu
name: unittest_linux_cpu_py3.7
python_version: '3.7'
- unittest_linux_cpu:
cu_version: cpu
name: unittest_linux_cpu_py3.8
python_version: '3.8'
- unittest_linux_cpu:
cu_version: cpu
name: unittest_linux_cpu_py3.9
python_version: '3.9'
- unittest_linux_cpu:
cu_version: cpu
name: unittest_linux_cpu_py3.10
python_version: '3.10'
- unittest_linux_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_linux_gpu_py3.7
python_version: '3.7'
- unittest_linux_gpu:
cu_version: cu116
name: unittest_linux_gpu_py3.8
python_version: '3.8'
- unittest_linux_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_linux_gpu_py3.9
python_version: '3.9'
- unittest_linux_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_linux_gpu_py3.10
python_version: '3.10'
- unittest_windows_cpu:
cu_version: cpu
name: unittest_windows_cpu_py3.7
python_version: '3.7'
- unittest_windows_cpu:
cu_version: cpu
name: unittest_windows_cpu_py3.8
python_version: '3.8'
- unittest_windows_cpu:
cu_version: cpu
name: unittest_windows_cpu_py3.9
python_version: '3.9'
- unittest_windows_cpu:
cu_version: cpu
name: unittest_windows_cpu_py3.10
python_version: '3.10'
- unittest_windows_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_windows_gpu_py3.7
python_version: '3.7'
- unittest_windows_gpu:
cu_version: cu116
name: unittest_windows_gpu_py3.8
python_version: '3.8'
- unittest_windows_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_windows_gpu_py3.9
python_version: '3.9'
- unittest_windows_gpu:
cu_version: cu116
filters:
branches:
only:
- main
- nightly
name: unittest_windows_gpu_py3.10
python_version: '3.10'
- unittest_macos_cpu:
cu_version: cpu
name: unittest_macos_cpu_py3.7
python_version: '3.7'
- unittest_macos_cpu:
cu_version: cpu
name: unittest_macos_cpu_py3.8
python_version: '3.8'
- unittest_macos_cpu:
cu_version: cpu
name: unittest_macos_cpu_py3.9
python_version: '3.9'
- unittest_macos_cpu:
cu_version: cpu
name: unittest_macos_cpu_py3.10
python_version: '3.10'
cmake:
jobs:
- cmake_linux_cpu:
cu_version: cpu
name: cmake_linux_cpu
python_version: '3.8'
- cmake_linux_gpu:
cu_version: cu116
name: cmake_linux_gpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda116
- cmake_windows_cpu:
cu_version: cpu
name: cmake_windows_cpu
python_version: '3.8'
- cmake_windows_gpu:
cu_version: cu116
name: cmake_windows_gpu
python_version: '3.8'
- cmake_macos_cpu:
cu_version: cpu
name: cmake_macos_cpu
python_version: '3.8'
nightly:
jobs:
- binary_ios_build:
build_environment: nightly-binary-libtorchvision_ops-ios-12.0.0-x86_64
filters:
branches:
only:
- nightly
ios_arch: x86_64
ios_platform: SIMULATOR
name: nightly_binary_libtorchvision_ops_ios_12.0.0_x86_64
- binary_ios_build:
build_environment: nightly-binary-libtorchvision_ops-ios-12.0.0-arm64
filters:
branches:
only:
- nightly
ios_arch: arm64
ios_platform: OS
name: nightly_binary_libtorchvision_ops_ios_12.0.0_arm64
- binary_ios_upload:
build_environment: nightly-binary-libtorchvision_ops-ios-12.0.0-upload
context: org-member
filters:
branches:
only:
- nightly
requires:
- nightly_binary_libtorchvision_ops_ios_12.0.0_x86_64
- nightly_binary_libtorchvision_ops_ios_12.0.0_arm64
- binary_android_upload:
build_environment: nightly-binary-libtorchvision_ops-android-upload
context: org-member
filters:
branches:
only:
- nightly
name: nightly_binary_libtorchvision_ops_android_upload
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cpu_upload
requires:
- nightly_binary_linux_wheel_py3.7_cpu
subfolder: cpu/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cu116
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cu116_upload
requires:
- nightly_binary_linux_wheel_py3.7_cu116
subfolder: cu116/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cu117
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_cu117_upload
requires:
- nightly_binary_linux_wheel_py3.7_cu117
subfolder: cu117/
- binary_linux_wheel:
cu_version: rocm5.1.1
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_rocm5.1.1
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_rocm5.1.1_upload
requires:
- nightly_binary_linux_wheel_py3.7_rocm5.1.1
subfolder: rocm5.1.1/
- binary_linux_wheel:
cu_version: rocm5.2
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_rocm5.2
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.7_rocm5.2_upload
requires:
- nightly_binary_linux_wheel_py3.7_rocm5.2
subfolder: rocm5.2/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cpu_upload
requires:
- nightly_binary_linux_wheel_py3.8_cpu
subfolder: cpu/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cu116
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cu116_upload
requires:
- nightly_binary_linux_wheel_py3.8_cu116
subfolder: cu116/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cu117
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_cu117_upload
requires:
- nightly_binary_linux_wheel_py3.8_cu117
subfolder: cu117/
- binary_linux_wheel:
cu_version: rocm5.1.1
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_rocm5.1.1
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_rocm5.1.1_upload
requires:
- nightly_binary_linux_wheel_py3.8_rocm5.1.1
subfolder: rocm5.1.1/
- binary_linux_wheel:
cu_version: rocm5.2
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_rocm5.2
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.8_rocm5.2_upload
requires:
- nightly_binary_linux_wheel_py3.8_rocm5.2
subfolder: rocm5.2/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cpu_upload
requires:
- nightly_binary_linux_wheel_py3.9_cpu
subfolder: cpu/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cu116
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cu116_upload
requires:
- nightly_binary_linux_wheel_py3.9_cu116
subfolder: cu116/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cu117
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_cu117_upload
requires:
- nightly_binary_linux_wheel_py3.9_cu117
subfolder: cu117/
- binary_linux_wheel:
cu_version: rocm5.1.1
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_rocm5.1.1
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_rocm5.1.1_upload
requires:
- nightly_binary_linux_wheel_py3.9_rocm5.1.1
subfolder: rocm5.1.1/
- binary_linux_wheel:
cu_version: rocm5.2
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_rocm5.2
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.9_rocm5.2_upload
requires:
- nightly_binary_linux_wheel_py3.9_rocm5.2
subfolder: rocm5.2/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cpu_upload
requires:
- nightly_binary_linux_wheel_py3.10_cpu
subfolder: cpu/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cu116
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cu116_upload
requires:
- nightly_binary_linux_wheel_py3.10_cu116
subfolder: cu116/
- binary_linux_wheel:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cu117
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_cu117_upload
requires:
- nightly_binary_linux_wheel_py3.10_cu117
subfolder: cu117/
- binary_linux_wheel:
cu_version: rocm5.1.1
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_rocm5.1.1
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-rocm:5.1.1
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_rocm5.1.1_upload
requires:
- nightly_binary_linux_wheel_py3.10_rocm5.1.1
subfolder: rocm5.1.1/
- binary_linux_wheel:
cu_version: rocm5.2
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_rocm5.2
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-rocm:5.2
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_wheel_py3.10_rocm5.2_upload
requires:
- nightly_binary_linux_wheel_py3.10_rocm5.2
subfolder: rocm5.2/
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.7_cpu_upload
requires:
- nightly_binary_macos_wheel_py3.7_cpu
subfolder: ''
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.8_cpu_upload
requires:
- nightly_binary_macos_wheel_py3.8_cpu
subfolder: ''
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.9_cpu_upload
requires:
- nightly_binary_macos_wheel_py3.9_cpu
subfolder: ''
- binary_macos_wheel:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_wheel_py3.10_cpu_upload
requires:
- nightly_binary_macos_wheel_py3.10_cpu
subfolder: ''
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cpu
python_version: '3.7'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cpu_upload
requires:
- nightly_binary_win_wheel_py3.7_cpu
subfolder: cpu/
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cu116
python_version: '3.7'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cu116_upload
requires:
- nightly_binary_win_wheel_py3.7_cu116
subfolder: cu116/
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cu117
python_version: '3.7'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.7_cu117_upload
requires:
- nightly_binary_win_wheel_py3.7_cu117
subfolder: cu117/
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cpu
python_version: '3.8'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cpu_upload
requires:
- nightly_binary_win_wheel_py3.8_cpu
subfolder: cpu/
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cu116
python_version: '3.8'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cu116_upload
requires:
- nightly_binary_win_wheel_py3.8_cu116
subfolder: cu116/
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cu117
python_version: '3.8'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.8_cu117_upload
requires:
- nightly_binary_win_wheel_py3.8_cu117
subfolder: cu117/
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cpu
python_version: '3.9'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cpu_upload
requires:
- nightly_binary_win_wheel_py3.9_cpu
subfolder: cpu/
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cu116
python_version: '3.9'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cu116_upload
requires:
- nightly_binary_win_wheel_py3.9_cu116
subfolder: cu116/
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cu117
python_version: '3.9'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.9_cu117_upload
requires:
- nightly_binary_win_wheel_py3.9_cu117
subfolder: cu117/
- binary_win_wheel:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cpu
python_version: '3.10'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cpu_upload
requires:
- nightly_binary_win_wheel_py3.10_cpu
subfolder: cpu/
- binary_win_wheel:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cu116
python_version: '3.10'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cu116_upload
requires:
- nightly_binary_win_wheel_py3.10_cu116
subfolder: cu116/
- binary_win_wheel:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cu117
python_version: '3.10'
- binary_wheel_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_wheel_py3.10_cu117_upload
requires:
- nightly_binary_win_wheel_py3.10_cu117
subfolder: cu117/
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cpu_upload
requires:
- nightly_binary_linux_conda_py3.7_cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cu116
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cu116_upload
requires:
- nightly_binary_linux_conda_py3.7_cu116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cu117
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.7_cu117_upload
requires:
- nightly_binary_linux_conda_py3.7_cu117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cpu_upload
requires:
- nightly_binary_linux_conda_py3.8_cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cu116
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cu116_upload
requires:
- nightly_binary_linux_conda_py3.8_cu116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cu117
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.8_cu117_upload
requires:
- nightly_binary_linux_conda_py3.8_cu117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cpu_upload
requires:
- nightly_binary_linux_conda_py3.9_cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cu116
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cu116_upload
requires:
- nightly_binary_linux_conda_py3.9_cu116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cu117
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.9_cu117_upload
requires:
- nightly_binary_linux_conda_py3.9_cu117
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cpu_upload
requires:
- nightly_binary_linux_conda_py3.10_cpu
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda116
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cu116
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda116
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cu116_upload
requires:
- nightly_binary_linux_conda_py3.10_cu116
- binary_linux_conda:
conda_docker_image: pytorch/conda-builder:cuda117
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cu117
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cuda117
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_linux_conda_py3.10_cu117_upload
requires:
- nightly_binary_linux_conda_py3.10_cu117
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.7_cpu
python_version: '3.7'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.7_cpu_upload
requires:
- nightly_binary_macos_conda_py3.7_cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.8_cpu
python_version: '3.8'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.8_cpu_upload
requires:
- nightly_binary_macos_conda_py3.8_cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.9_cpu
python_version: '3.9'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.9_cpu_upload
requires:
- nightly_binary_macos_conda_py3.9_cpu
- binary_macos_conda:
conda_docker_image: pytorch/conda-builder:cpu
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.10_cpu
python_version: '3.10'
wheel_docker_image: pytorch/manylinux-cpu
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_macos_conda_py3.10_cpu_upload
requires:
- nightly_binary_macos_conda_py3.10_cpu
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cpu
python_version: '3.7'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cpu_upload
requires:
- nightly_binary_win_conda_py3.7_cpu
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cu116
python_version: '3.7'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cu116_upload
requires:
- nightly_binary_win_conda_py3.7_cu116
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cu117
python_version: '3.7'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.7_cu117_upload
requires:
- nightly_binary_win_conda_py3.7_cu117
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cpu
python_version: '3.8'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cpu_upload
requires:
- nightly_binary_win_conda_py3.8_cpu
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cu116
python_version: '3.8'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cu116_upload
requires:
- nightly_binary_win_conda_py3.8_cu116
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cu117
python_version: '3.8'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.8_cu117_upload
requires:
- nightly_binary_win_conda_py3.8_cu117
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cpu
python_version: '3.9'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cpu_upload
requires:
- nightly_binary_win_conda_py3.9_cpu
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cu116
python_version: '3.9'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cu116_upload
requires:
- nightly_binary_win_conda_py3.9_cu116
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cu117
python_version: '3.9'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.9_cu117_upload
requires:
- nightly_binary_win_conda_py3.9_cu117
- binary_win_conda:
cu_version: cpu
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cpu
python_version: '3.10'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cpu_upload
requires:
- nightly_binary_win_conda_py3.10_cpu
- binary_win_conda:
cu_version: cu116
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cu116
python_version: '3.10'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cu116_upload
requires:
- nightly_binary_win_conda_py3.10_cu116
- binary_win_conda:
cu_version: cu117
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cu117
python_version: '3.10'
- binary_conda_upload:
context: org-member
filters:
branches:
only: nightly
tags:
only: /v[0-9]+(\.[0-9]+)*-rc[0-9]+/
name: nightly_binary_win_conda_py3.10_cu117_upload
requires:
- nightly_binary_win_conda_py3.10_cu117
docker_build:
triggers:
- schedule:
cron: "0 10 * * 0"
filters:
branches:
only:
- main
jobs:
- smoke_test_docker_image_build:
context: org-member
version: 2.1
# How to test the Linux jobs:
# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/
# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_linux_wheel_py3.7
# - Replace binary_linux_wheel_py3.7 with the name of the job you want to test.
# Job names are 'name:' key.
executors:
windows-cpu:
machine:
resource_class: windows.xlarge
image: windows-server-2019-vs2019:stable
shell: bash.exe
windows-gpu:
machine:
resource_class: windows.gpu.nvidia.medium
image: windows-server-2019-nvidia:stable
shell: bash.exe
commands:
checkout_merge:
description: "checkout merge branch"
steps:
- checkout
# - run:
# name: Checkout merge branch
# command: |
# set -ex
# BRANCH=$(git rev-parse --abbrev-ref HEAD)
# if [[ "$BRANCH" != "main" ]]; then
# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH}
# git checkout "merged/$CIRCLE_BRANCH"
# fi
designate_upload_channel:
description: "inserts the correct upload channel into ${BASH_ENV}"
steps:
- run:
name: adding UPLOAD_CHANNEL to BASH_ENV
command: |
our_upload_channel=test
echo "export UPLOAD_CHANNEL=${our_upload_channel}" >> ${BASH_ENV}
brew_update:
description: "Update Homebrew and install base formulae"
steps:
- run:
name: Update Homebrew
no_output_timeout: "10m"
command: |
set -ex
# Update repositories manually.
# Running `brew update` produces a comparison between the
# current checkout and the updated checkout, which takes a
# very long time because the existing checkout is 2y old.
for path in $(find /usr/local/Homebrew -type d -name .git)
do
cd $path/..
git fetch --depth=1 origin
git reset --hard origin/master
done
export HOMEBREW_NO_AUTO_UPDATE=1
# Install expect and moreutils so that we can call `unbuffer` and `ts`.
# moreutils installs a `parallel` executable by default, which conflicts
# with the executable from the GNU `parallel`, so we must unlink GNU
# `parallel` first, and relink it afterwards.
brew install coreutils
brew unlink parallel
brew install moreutils
brew link parallel --overwrite
brew install expect
brew_install:
description: "Install Homebrew formulae"
parameters:
formulae:
type: string
default: ""
steps:
- run:
name: Install << parameters.formulae >>
no_output_timeout: "10m"
command: |
set -ex
export HOMEBREW_NO_AUTO_UPDATE=1
brew install << parameters.formulae >>
run_brew_for_ios_build:
steps:
- brew_update
- brew_install:
formulae: libtool
apt_install:
parameters:
args:
type: string
descr:
type: string
default: ""
update:
type: boolean
default: true
steps:
- run:
name: >
<<^ parameters.descr >> apt install << parameters.args >> <</ parameters.descr >>
<<# parameters.descr >> << parameters.descr >> <</ parameters.descr >>
command: |
<<# parameters.update >> sudo apt update -qy <</ parameters.update >>
sudo apt install << parameters.args >>
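  # Example invocation of the apt_install command (taken from the lint_c job below);
  # the optional descr parameter overrides the auto-generated step name:
  #   - apt_install:
  #       args: libtinfo5
  #       descr: Install additional system libraries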
pip_install:
parameters:
args:
type: string
descr:
type: string
default: ""
user:
type: boolean
default: true
steps:
- run:
name: >
<<^ parameters.descr >> pip install << parameters.args >> <</ parameters.descr >>
<<# parameters.descr >> << parameters.descr >> <</ parameters.descr >>
command: >
pip install
<<# parameters.user >> --user <</ parameters.user >>
--progress-bar=off
<< parameters.args >>
install_torchvision:
parameters:
editable:
type: boolean
default: true
steps:
- pip_install:
args: --pre torch -f https://download.pytorch.org/whl/test/cpu/torch_test.html
          descr: Install PyTorch from the test channel
- pip_install:
args: --no-build-isolation <<# parameters.editable >> --editable <</ parameters.editable >> .
descr: Install torchvision <<# parameters.editable >> in editable mode <</ parameters.editable >>
  # Most of the test suite is handled by the `unittest` jobs, which have a completely different workflow and setup.
  # This command can be used when only a selection of tests needs to be run, e.g. for ad-hoc files.
run_tests_selective:
parameters:
file_or_dir:
type: string
steps:
- run:
name: Install test utilities
command: pip install --progress-bar=off pytest pytest-mock
- run:
name: Run tests
command: pytest --junitxml=test-results/junit.xml -v --durations 20 <<parameters.file_or_dir>>
- store_test_results:
path: test-results
download_model_weights:
parameters:
extract_roots:
type: string
default: "torchvision/models"
background:
type: boolean
default: true
steps:
- apt_install:
args: parallel wget
          descr: Install download utilities
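      # Pre-populate the torch hub cache: collect_model_urls.py scans the given source roots
      # for checkpoint URLs, and parallel/wget downloads them concurrently so later steps find
      # the weights locally instead of fetching them one by one.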
- run:
name: Download model weights
background: << parameters.background >>
command: |
mkdir -p ~/.cache/torch/hub/checkpoints
python scripts/collect_model_urls.py << parameters.extract_roots >> \
| parallel -j0 'wget --no-verbose -O ~/.cache/torch/hub/checkpoints/`basename {}` {}\?source=ci'
binary_common: &binary_common
parameters:
# Edit these defaults to do a release
build_version:
description: "version number of release binary; by default, build a nightly"
type: string
default: "0.14.1"
pytorch_version:
description: "PyTorch version to build against; by default, use a nightly"
type: string
default: "1.13.1"
# Don't edit these
python_version:
description: "Python version to build against (e.g., 3.7)"
type: string
cu_version:
description: "CUDA version to build against, in CU format (e.g., cpu or cu100)"
type: string
default: "cpu"
unicode_abi:
description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)"
type: string
default: ""
wheel_docker_image:
description: "Wheel only: what docker image to use"
type: string
default: ""
conda_docker_image:
description: "Conda only: what docker image to use"
type: string
default: "pytorch/conda-builder:cpu"
environment:
PYTHON_VERSION: << parameters.python_version >>
PYTORCH_VERSION: << parameters.pytorch_version >>
UNICODE_ABI: << parameters.unicode_abi >>
CU_VERSION: << parameters.cu_version >>
MACOSX_DEPLOYMENT_TARGET: 10.9
torchvision_ios_params: &torchvision_ios_params
parameters:
build_environment:
type: string
default: ""
ios_arch:
type: string
default: ""
ios_platform:
type: string
default: ""
environment:
BUILD_ENVIRONMENT: << parameters.build_environment >>
IOS_ARCH: << parameters.ios_arch >>
IOS_PLATFORM: << parameters.ios_platform >>
torchvision_android_params: &torchvision_android_params
parameters:
build_environment:
type: string
default: ""
environment:
BUILD_ENVIRONMENT: << parameters.build_environment >>
smoke_test_common: &smoke_test_common
<<: *binary_common
docker:
- image: torchvision/smoke_test:latest
jobs:
circleci_consistency:
docker:
- image: cimg/python:3.7
steps:
- checkout
- pip_install:
args: jinja2 pyyaml
- run:
name: Check CircleCI config consistency
command: |
python .circleci/regenerate.py
git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1)
lint_python_and_config:
docker:
- image: cimg/python:3.7
steps:
- checkout
- pip_install:
args: pre-commit
descr: Install lint utilities
- run:
name: Install pre-commit hooks
command: pre-commit install-hooks
- run:
name: Lint Python code and config files
command: pre-commit run --all-files
- run:
name: Required lint modifications
when: on_fail
command: git --no-pager diff
lint_c:
docker:
- image: cimg/python:3.7
steps:
- apt_install:
args: libtinfo5
descr: Install additional system libraries
- checkout
- run:
name: Install lint utilities
command: |
curl https://oss-clang-format.s3.us-east-2.amazonaws.com/linux64/clang-format-linux64 -o clang-format
chmod +x clang-format
sudo mv clang-format /opt/clang-format
- run:
name: Lint C code
command: ./.circleci/unittest/linux/scripts/run-clang-format.py -r torchvision/csrc --clang-format-executable /opt/clang-format
- run:
name: Required lint modifications
when: on_fail
command: git --no-pager diff
type_check_python:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision:
editable: true
- pip_install:
args: mypy
descr: Install Python type check utilities
- run:
name: Check Python types statically
command: mypy --install-types --non-interactive --config-file mypy.ini
unittest_torchhub:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision
- run_tests_selective:
file_or_dir: test/test_hub.py
unittest_onnx:
docker:
- image: cimg/python:3.7
steps:
- checkout
- install_torchvision
- pip_install:
args: onnx onnxruntime
descr: Install ONNX
- run_tests_selective:
file_or_dir: test/test_onnx.py
unittest_extended:
docker:
- image: cimg/python:3.7
resource_class: xlarge
steps:
- checkout
- download_model_weights
- install_torchvision
- run:
name: Enable extended tests
command: echo 'export PYTORCH_TEST_WITH_EXTENDED=1' >> $BASH_ENV
- run_tests_selective:
file_or_dir: test/test_extended_*.py
binary_linux_wheel:
<<: *binary_common
docker:
- image: << parameters.wheel_docker_image >>
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
          name: Build wheel packages
no_output_timeout: 30m
command: |
set -ex
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
binary_linux_conda:
<<: *binary_common
docker:
- image: "<< parameters.conda_docker_image >>"
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build conda packages
no_output_timeout: 30m
command: |
set -ex
packaging/build_conda.sh
- store_artifacts:
path: /opt/conda/conda-bld/linux-64
- persist_to_workspace:
root: /opt/conda/conda-bld/linux-64
paths:
- "*"
- store_test_results:
path: build_results/
binary_win_conda:
<<: *binary_common
executor: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build conda packages
no_output_timeout: 30m
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate base
conda install -yq conda-build "conda-package-handling!=1.5.0"
packaging/build_conda.sh
rm /C/tools/miniconda3/conda-bld/win-64/vs${VC_YEAR}*.tar.bz2
- store_artifacts:
path: C:/tools/miniconda3/conda-bld/win-64
- persist_to_workspace:
root: C:/tools/miniconda3/conda-bld/win-64
paths:
- "*"
- store_test_results:
path: build_results/
binary_win_wheel:
<<: *binary_common
executor: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Build wheel packages
no_output_timeout: 30m
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
- store_test_results:
path: build_results/
binary_macos_wheel:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
          # Cannot easily deduplicate this, as sourcing activate
          # sets environment variables that we need to propagate
          # to build_wheel.sh
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
packaging/build_wheel.sh
- store_artifacts:
path: dist
- persist_to_workspace:
root: dist
paths:
- "*"
binary_ios_build:
<<: *torchvision_ios_params
macos:
xcode: "14.0"
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run_brew_for_ios_build
- run:
name: Build
no_output_timeout: "1h"
command: |
script="/Users/distiller/project/.circleci/unittest/ios/scripts/binary_ios_build.sh"
cat "$script"
source "$script"
- persist_to_workspace:
root: /Users/distiller/workspace/
paths: ios
binary_ios_upload:
<<: *torchvision_ios_params
macos:
xcode: "14.0"
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run_brew_for_ios_build
- run:
name: Upload
no_output_timeout: "1h"
command: |
script="/Users/distiller/project/.circleci/unittest/ios/scripts/binary_ios_upload.sh"
cat "$script"
source "$script"
binary_android_build:
<<: *torchvision_android_params
docker:
- image: cimg/android:2021.08-ndk
resource_class: xlarge
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run:
name: Build
no_output_timeout: "1h"
command: |
script="/home/circleci/project/.circleci/unittest/android/scripts/binary_android_build.sh"
cat "$script"
source "$script"
- store_artifacts:
path: ~/workspace/artifacts
binary_android_upload:
<<: *torchvision_android_params
docker:
- image: cimg/android:2021.08-ndk
resource_class: xlarge
steps:
- attach_workspace:
at: ~/workspace
- checkout
- run:
name: Upload
no_output_timeout: "1h"
command: |
script="/home/circleci/project/.circleci/unittest/android/scripts/binary_android_upload.sh"
cat "$script"
source "$script"
binary_macos_conda:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
conda install -yq conda-build
packaging/build_conda.sh
- store_artifacts:
path: /Users/distiller/miniconda3/conda-bld/osx-64
- persist_to_workspace:
root: /Users/distiller/miniconda3/conda-bld/osx-64
paths:
- "*"
- store_test_results:
path: build_results/
# Requires org-member context
binary_conda_upload:
docker:
- image: continuumio/miniconda
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
command: |
            # Prevent credentials from leaking
conda install -yq anaconda-client
set -x
anaconda -t "${CONDA_PYTORCHBOT_TOKEN}" upload ~/workspace/*.tar.bz2 -u "pytorch-${UPLOAD_CHANNEL}" --label main --no-progress --force
# Requires org-member context
binary_wheel_upload:
parameters:
subfolder:
description: "What whl subfolder to upload to, e.g., blank or cu100/ (trailing slash is important)"
type: string
docker:
- image: cimg/python:3.7
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- checkout
- pip_install:
args: awscli
- run:
command: |
export PATH="$HOME/.local/bin:$PATH"
            # Prevent credentials from leaking
set +x
export AWS_ACCESS_KEY_ID="${PYTORCH_BINARY_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${PYTORCH_BINARY_AWS_SECRET_ACCESS_KEY}"
set -x
for pkg in ~/workspace/*.whl; do
aws s3 cp "$pkg" "s3://pytorch/whl/${UPLOAD_CHANNEL}/<< parameters.subfolder >>" --acl public-read
done
smoke_test_linux_conda:
<<: *smoke_test_common
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
conda install -v -y -c pytorch-nightly pytorch
conda install -v -y $(ls ~/workspace/torchvision*.tar.bz2)
- run:
name: smoke test
command: |
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_linux_pip:
<<: *smoke_test_common
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
- pip_install:
args: $(ls ~/workspace/torchvision*.whl) --pre -f https://download.pytorch.org/whl/test/torch_test.html
- run:
name: smoke test
command: |
source /usr/local/etc/profile.d/conda.sh && conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_docker_image_build:
machine:
image: ubuntu-2004:202104-01
resource_class: large
environment:
image_name: torchvision/smoke_test
steps:
- checkout
- designate_upload_channel
- run:
name: Build and push Docker image
no_output_timeout: "1h"
command: |
set +x
echo "${DOCKER_HUB_TOKEN}" | docker login --username "${DOCKER_HUB_USERNAME}" --password-stdin
set -x
cd .circleci/smoke_test/docker && docker build . -t ${image_name}:${CIRCLE_WORKFLOW_ID}
docker tag ${image_name}:${CIRCLE_WORKFLOW_ID} ${image_name}:latest
docker push ${image_name}:${CIRCLE_WORKFLOW_ID}
docker push ${image_name}:latest
smoke_test_win_conda:
<<: *binary_common
executor:
name: windows-cpu
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda env remove -n python${PYTHON_VERSION} || true
conda create -yn python${PYTHON_VERSION} python=${PYTHON_VERSION}
conda activate python${PYTHON_VERSION}
conda install -v -y -c pytorch-nightly pytorch
conda install -v -y $(ls ~/workspace/torchvision*.tar.bz2)
- run:
name: smoke test
command: |
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate python${PYTHON_VERSION}
python -c "import torchvision"
smoke_test_win_pip:
<<: *binary_common
executor:
name: windows-cpu
steps:
- attach_workspace:
at: ~/workspace
- designate_upload_channel
- run:
name: install binaries
command: |
set -x
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda create -yn python${PYTHON_VERSION} python=${PYTHON_VERSION}
conda activate python${PYTHON_VERSION}
- pip_install:
args: $(ls ~/workspace/torchvision*.whl) --pre -f https://download.pytorch.org/whl/test/torch_test.html
- run:
name: smoke test
command: |
eval "$('/C/tools/miniconda3/Scripts/conda.exe' 'shell.bash' 'hook')"
conda activate python${PYTHON_VERSION}
python -c "import torchvision"
unittest_linux_cpu:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cpu"
resource_class: 2xlarge+
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
          # This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
{% raw %}
keys:
- env-v2-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
{% raw %}
key: env-v2-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_linux_gpu:
<<: *binary_common
machine:
image: ubuntu-2004-cuda-11.4:202110-01
resource_class: gpu.nvidia.medium
environment:
image_name: "pytorch/manylinux-cuda116"
CU_VERSION: << parameters.cu_version >>
PYTHON_VERSION: << parameters.python_version >>
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
          # This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
{% raw %}
keys:
- env-v3-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
- run:
name: Setup
command: docker run -e PYTHON_VERSION -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
{% raw %}
key: env-v3-linux-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
paths:
- conda
- env
- run:
# Here we create an envlist file that contains some env variables that we want the docker container to be aware of.
# Normally, the CIRCLECI variable is set and available on all CI workflows: https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables.
          # They're available in all the other workflows (macOS and Windows).
# But here, we're running the unittest_linux_gpu workflows in a docker container, where those variables aren't accessible.
# So instead we dump the variables we need in env.list and we pass that file when invoking "docker run".
name: export CIRCLECI env var
command: echo "CIRCLECI=true" >> ./env.list
- run:
name: Install torchvision
command: docker run -t --gpus all -v $PWD:$PWD -w $PWD -e UPLOAD_CHANNEL -e CU_VERSION "${image_name}" .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: docker run --env-file ./env.list -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post Process
command: docker run -t --gpus all -v $PWD:$PWD -w $PWD "${image_name}" .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_windows_cpu:
<<: *binary_common
executor:
name: windows-cpu
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
          # This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
{% raw %}
keys:
- env-v2-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
- run:
name: Setup
command: .circleci/unittest/windows/scripts/setup_env.sh
- save_cache:
{% raw %}
key: env-v2-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/windows/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/windows/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/windows/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_windows_gpu:
<<: *binary_common
executor:
name: windows-gpu
environment:
CUDA_VERSION: "11.6"
PYTHON_VERSION: << parameters.python_version >>
steps:
- checkout
- designate_upload_channel
- run:
name: Generate cache key
          # This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
{% raw %}
keys:
- env-v1-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
- run:
name: Setup
command: .circleci/unittest/windows/scripts/setup_env.sh
- save_cache:
{% raw %}
key: env-v1-windows-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/windows/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
paths:
- conda
- env
- run:
name: Install CUDA
command: packaging/windows/internal/cuda_install.bat
- run:
name: Update CUDA driver
command: packaging/windows/internal/driver_update.bat
- run:
name: Install torchvision
command: .circleci/unittest/windows/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/windows/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/windows/scripts/post_process.sh
- store_test_results:
path: test-results
unittest_macos_cpu:
<<: *binary_common
macos:
xcode: "14.0"
resource_class: large
steps:
- checkout
- designate_upload_channel
      - run:
          name: Install wget
          # Disable Homebrew auto-update, which is very slow
          command: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget
- run:
name: Generate cache key
          # This will refresh the cache on Sundays; the nightly build should generate a new cache.
command: echo "$(date +"%Y-%U")" > .circleci-weekly
- restore_cache:
{% raw %}
keys:
- env-v3-macos-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- save_cache:
{% raw %}
key: env-v3-macos-{{ arch }}-py<< parameters.python_version >>-{{ checksum ".circleci/unittest/linux/scripts/environment.yml" }}-{{ checksum ".circleci-weekly" }}
{% endraw %}
paths:
- conda
- env
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Run tests
command: .circleci/unittest/linux/scripts/run_test.sh
- run:
name: Post process
command: .circleci/unittest/linux/scripts/post_process.sh
- store_test_results:
path: test-results
cmake_linux_cpu:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cpu"
resource_class: 2xlarge+
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Setup conda
command: .circleci/unittest/linux/scripts/setup_env.sh
- run: packaging/build_cmake.sh
cmake_linux_gpu:
<<: *binary_common
machine:
image: ubuntu-2004-cuda-11.4:202110-01
resource_class: gpu.nvidia.small
environment:
PYTHON_VERSION: << parameters.python_version >>
PYTORCH_VERSION: << parameters.pytorch_version >>
UNICODE_ABI: << parameters.unicode_abi >>
CU_VERSION: << parameters.cu_version >>
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Setup conda
command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> .circleci/unittest/linux/scripts/setup_env.sh
- run:
name: Build torchvision C++ distribution and test
no_output_timeout: 30m
command: docker run -e CU_VERSION -e PYTHON_VERSION -e UNICODE_ABI -e PYTORCH_VERSION -e UPLOAD_CHANNEL -t --gpus all -v $PWD:$PWD -w $PWD << parameters.wheel_docker_image >> packaging/build_cmake.sh
cmake_macos_cpu:
<<: *binary_common
macos:
xcode: "14.0"
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
sh conda.sh -b
source $HOME/miniconda3/bin/activate
conda install -yq conda-build cmake
packaging/build_cmake.sh
cmake_windows_cpu:
<<: *binary_common
executor:
name: windows-cpu
steps:
- checkout_merge
- designate_upload_channel
- run:
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/build_cmake.sh
cmake_windows_gpu:
<<: *binary_common
executor:
name: windows-gpu
steps:
- checkout_merge
- designate_upload_channel
- run:
name: Update CUDA driver
command: packaging/windows/internal/driver_update.bat
- run:
command: |
set -ex
source packaging/windows/internal/vc_install_helper.sh
packaging/windows/internal/cuda_install.bat
packaging/build_cmake.sh
build_docs:
<<: *binary_common
docker:
- image: cimg/python:3.7
resource_class: 2xlarge+
steps:
- attach_workspace:
at: ~/workspace
- checkout
- download_model_weights
- run:
name: Setup
command: .circleci/unittest/linux/scripts/setup_env.sh
- designate_upload_channel
- run:
name: Install torchvision
command: .circleci/unittest/linux/scripts/install.sh
- run:
name: Build docs
command: |
set -ex
# turn v1.12.0rc3 into 1.12.0
tag=$(echo $CIRCLE_TAG | sed -e 's/v*\([0-9.]*\).*/\1/')
VERSION=${tag:-main}
eval "$(./conda/bin/conda shell.bash hook)"
conda activate ./env
pushd docs
pip install --progress-bar=off -r requirements.txt
make html
popd
- persist_to_workspace:
root: ./
paths:
- "*"
- store_artifacts:
path: ./docs/build/html
destination: docs
upload_docs:
<<: *binary_common
docker:
- image: "pytorch/manylinux-cuda100"
resource_class: 2xlarge+
steps:
- attach_workspace:
at: ~/workspace
- run:
name: Generate netrc
command: |
# set credentials for https pushing
# requires the org-member context
cat > ~/.netrc \<<DONE
machine github.com
login pytorchbot
password ${GITHUB_PYTORCHBOT_TOKEN}
DONE
- run:
name: Upload docs
command: |
# Don't use "checkout" step since it uses ssh, which cannot git push
# https://circleci.com/docs/2.0/configuration-reference/#checkout
set -ex
# Change v1.12.1rc1 into 1.12 (only major.minor)
tag=$(echo $CIRCLE_TAG | sed -e 's/v*\([0-9]*\.[0-9]*\).*/\1/')
target=${tag:-main}
~/workspace/.circleci/build_docs/commit_docs.sh ~/workspace $target
workflows:
lint:
jobs:
- circleci_consistency
- lint_python_and_config
- lint_c
- type_check_python
build:
jobs:
{{ build_workflows(windows_latest_only=True) }}
{{ ios_workflows() }}
{{ android_workflows() }}
unittest:
jobs:
- unittest_torchhub
- unittest_onnx
- unittest_extended
{{ unittest_workflows() }}
cmake:
jobs:
{{ cmake_workflows() }}
nightly:
jobs:
{{ ios_workflows(nightly=True) }}
{{ android_workflows(nightly=True) }}
{{ build_workflows(prefix="nightly_", filter_branch="nightly", upload=True) }}
docker_build:
triggers:
- schedule:
cron: "0 10 * * 0"
filters:
branches:
only:
- main
jobs:
- smoke_test_docker_image_build:
context: org-member
#!/usr/bin/env python3
"""
This script should use a very simple, functional programming style.
Avoid Jinja macros in favor of native Python functions.
Don't go overboard on code generation; use Python only to generate
content that can't be easily declared statically using CircleCI's YAML API.
Data declarations (e.g. the nested loops for defining the configuration matrix)
should be at the top of the file for easy updating.
See this comment for design rationale:
https://github.com/pytorch/vision/pull/1321#issuecomment-531033978
"""
import os.path
import jinja2
import yaml
from jinja2 import select_autoescape
PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
RC_PATTERN = r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
def build_workflows(prefix="", filter_branch=None, upload=False, indentation=6, windows_latest_only=False):
w = []
for btype in ["wheel", "conda"]:
for os_type in ["linux", "macos", "win"]:
python_versions = PYTHON_VERSIONS
cu_versions_dict = {
"linux": ["cpu", "cu116", "cu117", "rocm5.1.1", "rocm5.2"],
"win": ["cpu", "cu116", "cu117"],
"macos": ["cpu"],
}
cu_versions = cu_versions_dict[os_type]
for python_version in python_versions:
for cu_version in cu_versions:
# ROCm conda packages not yet supported
if cu_version.startswith("rocm") and btype == "conda":
continue
for unicode in [False]:
fb = filter_branch
if (
windows_latest_only
and os_type == "win"
and filter_branch is None
and (
python_version != python_versions[-1]
or (cu_version not in [cu_versions[0], cu_versions[-1]])
)
):
fb = "main"
if not fb and (
os_type == "linux" and cu_version == "cpu" and btype == "wheel" and python_version == "3.7"
):
# the fields must match the build_docs "requires" dependency
fb = "/.*/"
w += workflow_pair(
btype, os_type, python_version, cu_version, unicode, prefix, upload, filter_branch=fb
)
if not filter_branch:
# Build on every pull request, but upload only on nightly and tags
w += build_doc_job("/.*/")
w += upload_doc_job("nightly")
return indent(indentation, w)
def workflow_pair(btype, os_type, python_version, cu_version, unicode, prefix="", upload=False, *, filter_branch=None):
w = []
unicode_suffix = "u" if unicode else ""
base_workflow_name = f"{prefix}binary_{os_type}_{btype}_py{python_version}{unicode_suffix}_{cu_version}"
w.append(
generate_base_workflow(
base_workflow_name, python_version, cu_version, unicode, os_type, btype, filter_branch=filter_branch
)
)
if upload:
w.append(generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, filter_branch=filter_branch))
    # Disable smoke tests; they are broken and need to be fixed
# if filter_branch == "nightly" and os_type in ["linux", "win"]:
# pydistro = "pip" if btype == "wheel" else "conda"
# w.append(generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type))
return w
def build_doc_job(filter_branch):
job = {
"name": "build_docs",
"python_version": "3.7",
"requires": [
"binary_linux_wheel_py3.7_cpu",
],
}
if filter_branch:
job["filters"] = gen_filter_branch_tree(filter_branch, tags_list=RC_PATTERN)
return [{"build_docs": job}]
def upload_doc_job(filter_branch):
job = {
"name": "upload_docs",
"context": "org-member",
"python_version": "3.7",
"requires": [
"build_docs",
],
}
if filter_branch:
job["filters"] = gen_filter_branch_tree(filter_branch, tags_list=RC_PATTERN)
return [{"upload_docs": job}]
manylinux_images = {
"cu116": "pytorch/manylinux-cuda116",
"cu117": "pytorch/manylinux-cuda117",
}
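# Map a CU_VERSION string to the manylinux builder image, e.g. "cpu" -> "pytorch/manylinux-cpu",
# "cu116" -> "pytorch/manylinux-cuda116", "rocm5.2" -> "pytorch/manylinux-rocm:5.2".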
def get_manylinux_image(cu_version):
if cu_version == "cpu":
return "pytorch/manylinux-cpu"
elif cu_version.startswith("cu"):
cu_suffix = cu_version[len("cu") :]
return f"pytorch/manylinux-cuda{cu_suffix}"
elif cu_version.startswith("rocm"):
rocm_suffix = cu_version[len("rocm") :]
return f"pytorch/manylinux-rocm:{rocm_suffix}"
def get_conda_image(cu_version):
if cu_version == "cpu":
return "pytorch/conda-builder:cpu"
elif cu_version.startswith("cu"):
cu_suffix = cu_version[len("cu") :]
return f"pytorch/conda-builder:cuda{cu_suffix}"
def generate_base_workflow(
base_workflow_name, python_version, cu_version, unicode, os_type, btype, *, filter_branch=None
):
d = {
"name": base_workflow_name,
"python_version": python_version,
"cu_version": cu_version,
}
if os_type != "win" and unicode:
d["unicode_abi"] = "1"
if os_type != "win":
d["wheel_docker_image"] = get_manylinux_image(cu_version)
# ROCm conda packages not yet supported
if "rocm" not in cu_version:
d["conda_docker_image"] = get_conda_image(cu_version)
if filter_branch is not None:
d["filters"] = {
"branches": {"only": filter_branch},
"tags": {
# Using a raw string here to avoid having to escape
# anything
"only": r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
},
}
w = f"binary_{os_type}_{btype}"
return {w: d}
def gen_filter_branch_tree(*branches, tags_list=None):
filter_dict = {"branches": {"only": [b for b in branches]}}
if tags_list is not None:
filter_dict["tags"] = {"only": tags_list}
return filter_dict
def generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, *, filter_branch=None):
d = {
"name": f"{base_workflow_name}_upload",
"context": "org-member",
"requires": [base_workflow_name],
}
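    # Wheels are uploaded into a per-CU-version subfolder (e.g. "cu117/"); macOS wheels are
    # CPU-only and go to the root of the channel (empty subfolder).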
if btype == "wheel":
d["subfolder"] = "" if os_type == "macos" else cu_version + "/"
if filter_branch is not None:
d["filters"] = {
"branches": {"only": filter_branch},
"tags": {
# Using a raw string here to avoid having to escape
# anything
"only": r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
},
}
return {f"binary_{btype}_upload": d}
def generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type):
required_build_suffix = "_upload"
required_build_name = base_workflow_name + required_build_suffix
smoke_suffix = f"smoke_test_{pydistro}"
d = {
"name": f"{base_workflow_name}_{smoke_suffix}",
"requires": [required_build_name],
"python_version": python_version,
}
if filter_branch:
d["filters"] = gen_filter_branch_tree(filter_branch)
return {f"smoke_test_{os_type}_{pydistro}": d}
def indent(indentation, data_list):
return ("\n" + " " * indentation).join(yaml.dump(data_list, default_flow_style=False).splitlines())
def unittest_workflows(indentation=6):
jobs = []
for os_type in ["linux", "windows", "macos"]:
for device_type in ["cpu", "gpu"]:
if os_type == "macos" and device_type == "gpu":
continue
for i, python_version in enumerate(PYTHON_VERSIONS):
job = {
"name": f"unittest_{os_type}_{device_type}_py{python_version}",
"python_version": python_version,
}
if device_type == "gpu":
if python_version != "3.8":
job["filters"] = gen_filter_branch_tree("main", "nightly")
job["cu_version"] = "cu116"
else:
job["cu_version"] = "cpu"
jobs.append({f"unittest_{os_type}_{device_type}": job})
return indent(indentation, jobs)
def cmake_workflows(indentation=6):
jobs = []
python_version = "3.8"
for os_type in ["linux", "windows", "macos"]:
# Skip OSX CUDA
device_types = ["cpu", "gpu"] if os_type != "macos" else ["cpu"]
for device in device_types:
job = {"name": f"cmake_{os_type}_{device}", "python_version": python_version}
job["cu_version"] = "cu116" if device == "gpu" else "cpu"
if device == "gpu" and os_type == "linux":
job["wheel_docker_image"] = "pytorch/manylinux-cuda116"
jobs.append({f"cmake_{os_type}_{device}": job})
return indent(indentation, jobs)
def ios_workflows(indentation=6, nightly=False):
jobs = []
build_job_names = []
name_prefix = "nightly_" if nightly else ""
env_prefix = "nightly-" if nightly else ""
for arch, platform in [("x86_64", "SIMULATOR"), ("arm64", "OS")]:
name = f"{name_prefix}binary_libtorchvision_ops_ios_12.0.0_{arch}"
build_job_names.append(name)
build_job = {
"build_environment": f"{env_prefix}binary-libtorchvision_ops-ios-12.0.0-{arch}",
"ios_arch": arch,
"ios_platform": platform,
"name": name,
}
if nightly:
build_job["filters"] = gen_filter_branch_tree("nightly")
jobs.append({"binary_ios_build": build_job})
if nightly:
upload_job = {
"build_environment": f"{env_prefix}binary-libtorchvision_ops-ios-12.0.0-upload",
"context": "org-member",
"filters": gen_filter_branch_tree("nightly"),
"requires": build_job_names,
}
jobs.append({"binary_ios_upload": upload_job})
return indent(indentation, jobs)
def android_workflows(indentation=6, nightly=False):
jobs = []
build_job_names = []
name_prefix = "nightly_" if nightly else ""
env_prefix = "nightly-" if nightly else ""
name = f"{name_prefix}binary_libtorchvision_ops_android"
build_job_names.append(name)
build_job = {
"build_environment": f"{env_prefix}binary-libtorchvision_ops-android",
"name": name,
}
if nightly:
upload_job = {
"build_environment": f"{env_prefix}binary-libtorchvision_ops-android-upload",
"context": "org-member",
"filters": gen_filter_branch_tree("nightly"),
"name": f"{name_prefix}binary_libtorchvision_ops_android_upload",
}
jobs.append({"binary_android_upload": upload_job})
else:
jobs.append({"binary_android_build": build_job})
return indent(indentation, jobs)
if __name__ == "__main__":
d = os.path.dirname(__file__)
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(d),
lstrip_blocks=True,
autoescape=select_autoescape(enabled_extensions=("html", "xml")),
keep_trailing_newline=True,
)
with open(os.path.join(d, "config.yml"), "w") as f:
f.write(
env.get_template("config.yml.in").render(
build_workflows=build_workflows,
unittest_workflows=unittest_workflows,
cmake_workflows=cmake_workflows,
ios_workflows=ios_workflows,
android_workflows=android_workflows,
)
)
# This Dockerfile is for the torchvision smoke test; the image is rebuilt periodically via the CI system.
# If you need to build it locally, follow the steps below once you have Docker installed,
# assuming you're in the directory where this Dockerfile is located:
# $ docker build . -t torchvision/smoketest
# If you want to push to AWS ECR, make sure you have write access to ECR, then run:
# $ eval $(aws ecr get-login --region us-east-1 --no-include-email)
# $ export MYTAG=localbuild ## you can choose whatever tag you like
# $ docker tag torchvision/smoketest 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchvision/smoke_test:${MYTAG}
# $ docker push 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchvision/smoke_test:${MYTAG}
FROM ubuntu:latest
RUN apt-get -qq update && apt-get -qq -y install curl bzip2 libsox-fmt-all \
&& curl -sSL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o /tmp/miniconda.sh \
&& bash /tmp/miniconda.sh -bfp /usr/local \
&& rm -rf /tmp/miniconda.sh \
&& conda install -y python=3 \
&& conda update conda \
&& apt-get -qq -y remove curl bzip2 \
&& apt-get -qq -y autoremove \
&& apt-get autoclean \
&& rm -rf /var/lib/apt/lists/* /var/log/dpkg.log \
&& conda clean --all --yes
# conda was installed with prefix /usr/local above, so its binaries live in /usr/local/bin
ENV PATH /usr/local/bin:$PATH
RUN conda create -y --name python3.7 python=3.7
RUN conda create -y --name python3.8 python=3.8
RUN conda create -y --name python3.9 python=3.9
RUN conda create -y --name python3.10 python=3.10
SHELL [ "/bin/bash", "-c" ]
RUN echo "source /usr/local/etc/profile.d/conda.sh" >> ~/.bashrc
CMD [ "/bin/bash"]
#!/bin/bash
set -ex -o pipefail
echo "DIR: $(pwd)"
echo "ANDROID_HOME=${ANDROID_HOME}"
echo "ANDROID_NDK_HOME=${ANDROID_NDK_HOME}"
echo "JAVA_HOME=${JAVA_HOME}"
WORKSPACE=/home/circleci/workspace
VISION_ANDROID=/home/circleci/project/android
. /home/circleci/project/.circleci/unittest/android/scripts/install_gradle.sh
GRADLE_LOCAL_PROPERTIES=${VISION_ANDROID}/local.properties
rm -f $GRADLE_LOCAL_PROPERTIES
echo "sdk.dir=${ANDROID_HOME}" >> $GRADLE_LOCAL_PROPERTIES
echo "ndk.dir=${ANDROID_NDK_HOME}" >> $GRADLE_LOCAL_PROPERTIES
echo "GRADLE_PATH $GRADLE_PATH"
echo "GRADLE_HOME $GRADLE_HOME"
${GRADLE_PATH} --scan --stacktrace --debug --no-daemon -p ${VISION_ANDROID} assemble || true
mkdir -p ~/workspace/artifacts
find . -type f -name '*aar' -print | xargs tar cfvz ~/workspace/artifacts/artifacts-aars.tgz
find . -type f -name '*apk' -print | xargs tar cfvz ~/workspace/artifacts/artifacts-apks.tgz
#!/bin/bash
set -ex -o pipefail
echo "DIR: $(pwd)"
echo "ANDROID_HOME=${ANDROID_HOME}"
echo "ANDROID_NDK_HOME=${ANDROID_NDK_HOME}"
echo "JAVA_HOME=${JAVA_HOME}"
WORKSPACE=/home/circleci/workspace
VISION_ANDROID=/home/circleci/project/android
. /home/circleci/project/.circleci/unittest/android/scripts/install_gradle.sh
GRADLE_LOCAL_PROPERTIES=${VISION_ANDROID}/local.properties
rm -f $GRADLE_LOCAL_PROPERTIES
GRADLE_PROPERTIES=/home/circleci/project/android/gradle.properties
echo "sdk.dir=${ANDROID_HOME}" >> $GRADLE_LOCAL_PROPERTIES
echo "ndk.dir=${ANDROID_NDK_HOME}" >> $GRADLE_LOCAL_PROPERTIES
echo "SONATYPE_NEXUS_USERNAME=${SONATYPE_NEXUS_USERNAME}" >> $GRADLE_PROPERTIES
echo "mavenCentralRepositoryUsername=${SONATYPE_NEXUS_USERNAME}" >> $GRADLE_PROPERTIES
echo "SONATYPE_NEXUS_PASSWORD=${SONATYPE_NEXUS_PASSWORD}" >> $GRADLE_PROPERTIES
echo "mavenCentralRepositoryPassword=${SONATYPE_NEXUS_PASSWORD}" >> $GRADLE_PROPERTIES
echo "signing.keyId=${ANDROID_SIGN_KEY}" >> $GRADLE_PROPERTIES
echo "signing.password=${ANDROID_SIGN_PASS}" >> $GRADLE_PROPERTIES
grep VERSION /home/circleci/project/android/gradle.properties
${GRADLE_PATH} --scan --stacktrace --debug --no-daemon -p ${VISION_ANDROID} ops:uploadArchives
mkdir -p ~/workspace/artifacts
find . -type f -name "*.aar" -print | xargs tar cfvz ~/workspace/artifacts/artifacts-aars.tgz
#!/bin/bash
set -ex
_https_amazon_aws=https://ossci-android.s3.amazonaws.com
GRADLE_VERSION=6.8.3
_gradle_home=/opt/gradle
sudo rm -rf "$_gradle_home"
sudo mkdir -p $_gradle_home
curl --silent --output /tmp/gradle.zip --retry 3 $_https_amazon_aws/gradle-${GRADLE_VERSION}-bin.zip
sudo unzip -q /tmp/gradle.zip -d $_gradle_home
rm /tmp/gradle.zip
sudo chmod -R 777 $_gradle_home
export GRADLE_HOME=$_gradle_home/gradle-$GRADLE_VERSION
export GRADLE_PATH=${GRADLE_HOME}/bin/gradle
#!/bin/bash
set -ex -o pipefail
echo ""
echo "DIR: $(pwd)"
WORKSPACE=/Users/distiller/workspace
PROJ_ROOT_IOS=/Users/distiller/project/ios
PYTORCH_IOS_NIGHTLY_NAME=libtorch_ios_nightly_build.zip
export TCLLIBPATH="/usr/local/lib"
# install conda
curl --retry 3 -o ~/conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
chmod +x ~/conda.sh
/bin/bash ~/conda.sh -b -p ~/anaconda
export PATH="~/anaconda/bin:${PATH}"
source ~/anaconda/bin/activate
# install dependencies
conda install numpy ninja pyyaml mkl mkl-include setuptools cmake cffi requests typing_extensions wget --yes
conda install -c conda-forge valgrind --yes
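# Point CMake at the active conda prefix (falling back to the conda install root) so it can find the dependencies installed above.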
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
# sync submodules
cd ${PROJ_ROOT_IOS}
git submodule sync
git submodule update --init --recursive
# download pytorch-iOS nightly build and unzip it
mkdir -p ${PROJ_ROOT_IOS}/lib
mkdir -p ${PROJ_ROOT_IOS}/build
mkdir -p ${PROJ_ROOT_IOS}/pytorch
TORCH_ROOT="${PROJ_ROOT_IOS}/pytorch"
cd ${TORCH_ROOT}
wget https://ossci-ios-build.s3.amazonaws.com/${PYTORCH_IOS_NIGHTLY_NAME}
mkdir -p ./build_ios
unzip -d ./build_ios ./${PYTORCH_IOS_NIGHTLY_NAME}
LIBTORCH_HEADER_ROOT="${TORCH_ROOT}/build_ios/install/include"
cd ${PROJ_ROOT_IOS}
IOS_ARCH=${IOS_ARCH} LIBTORCH_HEADER_ROOT=${LIBTORCH_HEADER_ROOT} ./build_ios.sh
rm -rf ${TORCH_ROOT}
# store the binary
DEST_DIR=${WORKSPACE}/ios/${IOS_ARCH}
mkdir -p ${DEST_DIR}
cp ${PROJ_ROOT_IOS}/lib/*.a ${DEST_DIR}
#!/bin/bash
set -ex -o pipefail
echo ""
echo "DIR: $(pwd)"
WORKSPACE=/Users/distiller/workspace
PROJ_ROOT=/Users/distiller/project
ARTIFACTS_DIR=${WORKSPACE}/ios
ls ${ARTIFACTS_DIR}
ZIP_DIR=${WORKSPACE}/zip
mkdir -p ${ZIP_DIR}/install/lib
# build a FAT (universal) binary by combining the per-architecture static libraries
cd ${ZIP_DIR}/install/lib
libs=("${ARTIFACTS_DIR}/x86_64/libtorchvision_ops.a" "${ARTIFACTS_DIR}/arm64/libtorchvision_ops.a")
lipo -create "${libs[@]}" -o ${ZIP_DIR}/install/lib/libtorchvision_ops.a
lipo -info ${ZIP_DIR}/install/lib/*.a
# copy the license
cp ${PROJ_ROOT}/LICENSE ${ZIP_DIR}/
# zip the library
ZIPFILE=libtorchvision_ops_ios_nightly_build.zip
cd ${ZIP_DIR}
# for testing
touch version.txt
echo $(date +%s) > version.txt
zip -r ${ZIPFILE} install version.txt LICENSE
# upload to aws
# Install conda then 'conda install' awscli
curl --retry 3 -o ~/conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
chmod +x ~/conda.sh
/bin/bash ~/conda.sh -b -p ~/anaconda
export PATH="~/anaconda/bin:${PATH}"
source ~/anaconda/bin/activate
conda install -c conda-forge awscli --yes
set +x
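# Command echoing is disabled above so the AWS credentials are not printed to the build log.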
export AWS_ACCESS_KEY_ID=${AWS_S3_ACCESS_KEY_FOR_PYTORCH_BINARY_UPLOAD}
export AWS_SECRET_ACCESS_KEY=${AWS_S3_ACCESS_SECRET_FOR_PYTORCH_BINARY_UPLOAD}
set -x
aws s3 cp ${ZIPFILE} s3://ossci-ios-build/ --acl public-read
channels:
- pytorch
- defaults
dependencies:
- pytest
- pytest-cov
- pytest-mock
- pip
- libpng
- jpeg
- ca-certificates
- h5py
- pip:
- future
- scipy
- av
#!/usr/bin/env bash
unset PYTORCH_VERSION
# For unit tests, nightly PyTorch is installed by the section below,
# so there is no need to set PYTORCH_VERSION.
# In fact, keeping PYTORCH_VERSION set would force us to hardcode the PyTorch version in the config.
set -e
eval "$(./conda/bin/conda shell.bash hook)"
conda activate ./env
if [ "${CU_VERSION:-}" == cpu ] ; then
cudatoolkit="cpuonly"
version="cpu"
else
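# CU_VERSION has the form cuXY or cuXYZ (e.g. cu92 -> 9.2, cu117 -> 11.7); slice out major and minor.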
if [[ ${#CU_VERSION} -eq 4 ]]; then
CUDA_VERSION="${CU_VERSION:2:1}.${CU_VERSION:3:1}"
elif [[ ${#CU_VERSION} -eq 5 ]]; then
CUDA_VERSION="${CU_VERSION:2:2}.${CU_VERSION:4:1}"
fi
echo "Using CUDA $CUDA_VERSION as determined by CU_VERSION: ${CU_VERSION} "
version="$(python -c "print('.'.join(\"${CUDA_VERSION}\".split('.')[:2]))")"
cudatoolkit="pytorch-cuda=${version}"
fi
case "$(uname -s)" in
Darwin*) os=MacOSX;;
*) os=Linux
esac
printf "Installing PyTorch with %s\n" "${cudatoolkit}"
if [ "${os}" == "MacOSX" ]; then
conda install -y -c "pytorch-${UPLOAD_CHANNEL}" "pytorch-${UPLOAD_CHANNEL}"::pytorch "${cudatoolkit}"
else
conda install -y -c "pytorch-${UPLOAD_CHANNEL}" -c nvidia "pytorch-${UPLOAD_CHANNEL}"::pytorch[build="*${version}*"] "${cudatoolkit}"
fi
printf "* Installing torchvision\n"
python setup.py develop
#!/usr/bin/env bash
set -e
eval "$(./conda/bin/conda shell.bash hook)"
conda activate ./env
#!/usr/bin/env bash
set -e
eval "$(./conda/bin/conda shell.bash hook)"
conda activate ./env
python -m torch.utils.collect_env
pytest --junitxml=test-results/junit.xml -v --durations 20
#!/usr/bin/env bash
# This script sets up the environment in which the unit tests are run.
# To speed up CI, the resulting environment is cached.
#
# Do not install PyTorch and torchvision here, otherwise they also get cached.
set -e
this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Avoid error: "fatal: unsafe repository"
git config --global --add safe.directory '*'
root_dir="$(git rev-parse --show-toplevel)"
conda_dir="${root_dir}/conda"
env_dir="${root_dir}/env"
cd "${root_dir}"
case "$(uname -s)" in
Darwin*) os=MacOSX;;
*) os=Linux
esac
# 1. Install conda at ./conda
if [ ! -d "${conda_dir}" ]; then
printf "* Installing conda\n"
wget -O miniconda.sh "http://repo.continuum.io/miniconda/Miniconda3-latest-${os}-x86_64.sh"
bash ./miniconda.sh -b -f -p "${conda_dir}"
fi
eval "$(${conda_dir}/bin/conda shell.bash hook)"
# 2. Create test environment at ./env
if [ ! -d "${env_dir}" ]; then
printf "* Creating a test environment\n"
conda create --prefix "${env_dir}" -y python="$PYTHON_VERSION"
fi
conda activate "${env_dir}"
# 3. Install Conda dependencies
printf "* Installing dependencies (except PyTorch)\n"
FFMPEG_PIN="=4.2"
if [[ "${PYTHON_VERSION}" = "3.9" ]]; then
FFMPEG_PIN=">=4.2"
fi
conda install -y -c pytorch "ffmpeg${FFMPEG_PIN}"
conda env update --file "${this_dir}/environment.yml" --prune
channels:
- pytorch
- defaults
dependencies:
- pytest
- pytest-cov
- pytest-mock
- pip
- libpng
- jpeg
- ca-certificates
- hdf5
- setuptools
- pip:
- future
- scipy
- av != 9.1.1
- dataclasses
- h5py
#!/usr/bin/env bash
unset PYTORCH_VERSION
# For unit tests, nightly PyTorch is installed by the section below,
# so there is no need to set PYTORCH_VERSION.
# In fact, keeping PYTORCH_VERSION set would force us to hardcode the PyTorch version in the config.
set -ex
this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')"
conda activate ./env
# TODO: refactor the logic below so it is easier to see how the correct CUDA version is derived.
if [ "${CU_VERSION:-}" == cpu ] ; then
cudatoolkit="cpuonly"
version="cpu"
else
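# CU_VERSION has the form cuXY or cuXYZ (e.g. cu92 -> 9.2, cu117 -> 11.7); slice out major and minor.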
if [[ ${#CU_VERSION} -eq 4 ]]; then
CUDA_VERSION="${CU_VERSION:2:1}.${CU_VERSION:3:1}"
elif [[ ${#CU_VERSION} -eq 5 ]]; then
CUDA_VERSION="${CU_VERSION:2:2}.${CU_VERSION:4:1}"
fi
cuda_toolkit_pckg="cudatoolkit"
if [[ $CUDA_VERSION == 11.6 || $CUDA_VERSION == 11.7 ]]; then
cuda_toolkit_pckg="pytorch-cuda"
fi
echo "Using CUDA $CUDA_VERSION as determined by CU_VERSION"
version="$(python -c "print('.'.join(\"${CUDA_VERSION}\".split('.')[:2]))")"
cudatoolkit="${cuda_toolkit_pckg}=${version}"
fi
printf "Installing PyTorch with %s\n" "${cudatoolkit}"
conda install -y -c "pytorch-${UPLOAD_CHANNEL}" -c nvidia "pytorch-${UPLOAD_CHANNEL}"::pytorch[build="*${version}*"] "${cudatoolkit}"
torch_cuda=$(python -c "import torch; print(torch.cuda.is_available())")
echo torch.cuda.is_available is $torch_cuda
if [ ! -z "${CUDA_VERSION:-}" ] ; then
if [ "$torch_cuda" == "False" ]; then
echo "torch with cuda installed but torch.cuda.is_available() is False"
exit 1
fi
fi
source "$this_dir/set_cuda_envs.sh"
printf "* Installing torchvision\n"
"$this_dir/vc_env_helper.bat" python setup.py develop
start /wait "" "%miniconda_exe%" /S /InstallationType=JustMe /RegisterPython=0 /AddToPath=0 /D=%tmp_conda%
#!/usr/bin/env bash
set -e
eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')"
conda activate ./env