Unverified Commit 69de8c4b authored by liuzhe-lz, committed by GitHub
Browse files

Move pipeline cache to azure blob (#4726)

parent 8499d63f
trigger:
branches:
exclude: [ l10n_master ]
trigger: none
stages:
- stage: lint
......@@ -12,7 +10,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: ubuntu-latest
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
sudo apt-get install -y pandoc
......@@ -52,7 +49,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: ubuntu-latest
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
# pylint requires newer typing extension. Override requirements in tensorflow
......@@ -81,7 +77,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: ubuntu-latest
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
set -e
......@@ -108,7 +103,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: ubuntu-latest
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
python setup.py develop
......@@ -162,7 +156,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: ubuntu-legacy
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
python setup.py develop
......@@ -194,7 +187,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: windows
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
python setup.py develop --no-user
......@@ -222,7 +214,6 @@ stages:
- template: templates/install-dependencies.yml
parameters:
platform: macos
useCache: ${{ eq('$(USE_CACHE)', 'true') }}
- script: |
python setup.py develop
......
......@@ -2,6 +2,16 @@ parameters:
- name: platform
type: string
# variables set on VSO: (for security concern)
# sas_token
#
# its value should be:
# sp=racw&st=2022-04-01T00:00:00Z&se=......
#
# how to re-generate:
# 1. find the storage on azure portal: nni (resource group) -> nni (storage account) -> cache (container)
# 2. settings - shared access tokens - generate sas token and url
steps:
- template: config-version.yml
parameters:
......@@ -63,12 +73,15 @@ steps:
displayName: Install Web UI dependencies
- script: |
python test/vso_tools/pack_dependencies.py $(Build.ArtifactStagingDirectory)
python test/vso_tools/pack_dependencies.py dependencies.zip
displayName: Create cache archive
- task: UniversalPackages@0
inputs:
command: publish
vstsFeedPublish: NNIOpenSource/dependencies
vstsFeedPackagePublish: dependencies-${{ parameters.platform }}
displayName: Upload cache archive
- script: |
azcopy copy dependencies.zip 'https://nni.blob.core.windows.net/cache/dependencies-${{ parameters.platform }}.zip?$(sas_token)'
displayName: (POSIX) Upload cache archive
condition: and(succeeded(), not(contains('${{ parameters.platform }}', 'windows')))
- powershell: |
azcopy copy dependencies.zip 'https://nni.blob.core.windows.net/cache/dependencies-${{ parameters.platform }}.zip?$(sas_token)'
displayName: (Windows) Upload cache archive
condition: and(succeeded(), contains('${{ parameters.platform }}', 'windows'))
parameters:
- name: platform
type: string
- name: useCache
type: boolean
steps:
- template: config-version.yml
......@@ -25,18 +23,20 @@ steps:
displayName: (macOS) Downgrade swig
condition: and(succeeded(), contains('${{ parameters.platform }}', 'macos'))
- task: UniversalPackages@0
inputs:
vstsFeed: NNIOpenSource/dependencies
vstsFeedPackage: dependencies-${{ parameters.platform }}
vstsPackageVersion: "*"
displayName: Download cache
condition: and(succeeded(), ${{ parameters.useCache }})
- script: |
python test/vso_tools/unpack_dependencies.py
displayName: Unpack cache
condition: and(succeeded(), ${{ parameters.useCache }})
set -e
azcopy copy 'https://nni.blob.core.windows.net/cache/dependencies-${{ parameters.platform }}.zip' dependencies.zip
python test/vso_tools/unpack_dependencies.py dependencies.zip
displayName: (POSIX) Download cache
condition: and(succeeded(), not(contains('${{ parameters.platform }}', 'windows')))
continueOnError: true
- powershell: |
azcopy copy 'https://nni.blob.core.windows.net/cache/dependencies-${{ parameters.platform }}.zip' dependencies.zip
python test/vso_tools/unpack_dependencies.py dependencies.zip
displayName: (Windows) Download cache
condition: and(succeeded(), contains('${{ parameters.platform }}', 'windows'))
continueOnError: true
- script: |
mv dependencies/recommended_legacy.txt dependencies/recommended.txt
......@@ -54,13 +54,6 @@ steps:
python -m pip install -r dependencies/recommended.txt
displayName: Install Python dependencies
# FIXME: See cache-dependencies-template.yml on why it needs rebuild.
- script: |
python -m pip uninstall -y ConfigSpaceNNI
python -m pip install --no-cache-dir ConfigSpaceNNI
displayName: Rebuild ConfigSpaceNNI
condition: and(succeeded(), not(${{ parameters.useCache }}), contains('${{ parameters.platform }}', 'ubuntu'))
# TODO: Delete this after upgrading to PyTorch 1.11.
- script: |
python test/vso_tools/interim_patch.py
......
"""
Create an archive in sys.argv[1], containing python-packages and node_modules.
Create a zip archive named sys.argv[1], containing python-packages and node_modules.
Use unpack_dependencies.py to extract the archive.
Example usage:
python test/vso_tools/pack_dependencies.py dependencies.zip
"""
import json
......@@ -18,7 +22,7 @@ def main() -> None:
shutil.move('ts/nni_manager/node_modules', 'cache/nni-manager-dependencies')
shutil.move('ts/webui/node_modules', 'cache/webui-dependencies')
archive = ZipFile('cache.zip', 'w', ZIP_DEFLATED, compresslevel=9)
archive = ZipFile(sys.argv[1], 'w', ZIP_DEFLATED, compresslevel=9)
symlinks = {}
empty_dirs = set()
for file in sorted(cache.rglob('*')):
......@@ -36,8 +40,5 @@ def main() -> None:
archive.writestr('directories.json', json.dumps(list(empty_dirs), indent=4))
archive.close()
assert Path(sys.argv[1]).is_dir()
shutil.move('cache.zip', sys.argv[1])
if __name__ == '__main__':
main()
"""
Extract an archive created by pack_dependencies.py.
Extract archive sys.argv[1] created by pack_dependencies.py.
"""
from __future__ import annotations
......@@ -23,7 +23,7 @@ def main() -> None:
print('All Python paths:')
print('\n'.join(sys.path), flush=True)
extract_all(ZipFile('cache.zip'))
extract_all(ZipFile(sys.argv[1]))
empty_dirs = json.loads(Path('directories.json').read_text())
symlinks = json.loads(Path('symlinks.json').read_text())
for dir_ in empty_dirs:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment