Unverified commit 2ed97b61, authored by moto and committed by GitHub

Limit the number of downloads per workflow for reliable builds (#670)

* Add caching to the binary build for a more reliable build

* Apply the same logic to the unittest workflow
parent 449b6abf
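The change, in short: a new download_third_parties_nix job fetches the third-party source archives once per workflow, caches them, and hands them to the build and unittest jobs through a CircleCI workspace. The cache key combines a daily-rotating date stamp with checksums of the two build scripts; roughly, as an illustrative shell sketch (not part of the commit itself):

# Write a key file that changes once per day (%Y-%U-%d is year, week number, day of month).
date +"%Y-%U-%d" > .circleci-daily
# The restore/save steps below then use a key of the form
#   tp-nix-{{ checksum ".circleci-daily" }}-{{ checksum "./build_tools/setup_helpers/build_third_party.sh" }}-{{ checksum "./build_tools/setup_helpers/build_third_party_helper.sh" }}
# so the cached third_party/tmp archives are reused for at most a day and are
# invalidated whenever either build script changes.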
.circleci/config.yml

@@ -60,6 +60,34 @@ jobs:
python .circleci/regenerate.py
git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1)
download_third_parties_nix:
docker:
- image: "centos:8"
resource_class: small
steps:
- checkout
- run:
name: Generate cache key
# This will refresh the cache daily
command: echo "$(date +"%Y-%U-%d")" > .circleci-daily
- restore_cache:
keys:
- tp-nix-{{ checksum ".circleci-daily" }}-{{ checksum "./build_tools/setup_helpers/build_third_party.sh" }}-{{ checksum "./build_tools/setup_helpers/build_third_party_helper.sh" }}
- run:
command: ./build_tools/setup_helpers/build_third_party.sh $PWD --download-only
- save_cache:
key: tp-nix-{{ checksum ".circleci-daily" }}-{{ checksum "./build_tools/setup_helpers/build_third_party.sh" }}-{{ checksum "./build_tools/setup_helpers/build_third_party_helper.sh" }}
paths:
- third_party/tmp
- persist_to_workspace:
root: third_party
paths:
- tmp
binary_linux_wheel:
<<: *binary_common
docker:
@@ -67,6 +95,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run: packaging/build_wheel.sh
- store_artifacts:
path: dist
@@ -82,6 +112,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run: packaging/build_conda.sh
- store_artifacts:
path: /opt/conda/conda-bld/linux-64
@@ -96,6 +128,8 @@ jobs:
xcode: "9.0"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
# Cannot easily deduplicate this as source'ing activate
# will set environment variables which we need to propagate
@@ -118,6 +152,8 @@ jobs:
xcode: "9.0"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
@@ -322,6 +358,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run:
name: Generate cache key
# This will refresh cache on Sundays, nightly build should generate new cache.
@@ -363,6 +401,8 @@ jobs:
image_name: "pytorch/torchaudio_unittest_base:manylinux-cuda10.1"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
name: Generate cache key
# This will refresh cache on Sundays, nightly build should generate new cache.
@@ -506,24 +546,38 @@ workflows:
build:
jobs:
- circleci_consistency
- download_third_parties_nix:
name: download_third_parties_nix
- binary_linux_wheel:
name: binary_linux_wheel_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_linux_wheel:
name: binary_linux_wheel_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_linux_wheel:
name: binary_linux_wheel_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_macos_wheel:
name: binary_macos_wheel_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_macos_wheel:
name: binary_macos_wheel_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_macos_wheel:
name: binary_macos_wheel_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_windows_wheel:
name: binary_windows_wheel_py3.6
python_version: '3.6'
@@ -536,21 +590,33 @@ workflows:
- binary_linux_conda:
name: binary_linux_conda_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_linux_conda:
name: binary_linux_conda_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_linux_conda:
name: binary_linux_conda_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_macos_conda:
name: binary_macos_conda_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_macos_conda:
name: binary_macos_conda_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_macos_conda:
name: binary_macos_conda_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_windows_conda:
name: binary_windows_conda_py3.6
python_version: '3.6'
@@ -562,36 +628,50 @@ workflows:
python_version: '3.8'
unittest:
jobs:
- download_third_parties_nix:
name: download_third_parties_nix
- unittest_linux_cpu:
name: unittest_linux_cpu_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- stylecheck:
name: stylecheck_py3.6
python_version: '3.6'
- unittest_linux_cpu:
name: unittest_linux_cpu_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- unittest_linux_cpu:
name: unittest_linux_cpu_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- unittest_linux_gpu:
filters:
branches:
only: master
name: unittest_linux_gpu_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- unittest_linux_gpu:
filters:
branches:
only: master
name: unittest_linux_gpu_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- unittest_linux_gpu:
filters:
branches:
only: master
name: unittest_linux_gpu_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- unittest_windows_cpu:
name: unittest_windows_cpu_py3.6
python_version: '3.6'
@@ -625,12 +705,19 @@ workflows:
filters:
branches:
only: nightly
- download_third_parties_nix:
filters:
branches:
only: nightly
name: download_third_parties_nix
- binary_linux_wheel:
filters:
branches:
only: nightly
name: nightly_binary_linux_wheel_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -653,6 +740,8 @@ workflows:
only: nightly
name: nightly_binary_linux_wheel_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -675,6 +764,8 @@ workflows:
only: nightly
name: nightly_binary_linux_wheel_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -697,6 +788,8 @@ workflows:
only: nightly
name: nightly_binary_macos_wheel_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -711,6 +804,8 @@ workflows:
only: nightly
name: nightly_binary_macos_wheel_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -725,6 +820,8 @@ workflows:
only: nightly
name: nightly_binary_macos_wheel_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_wheel_upload:
context: org-member
filters:
@@ -805,6 +902,8 @@ workflows:
only: nightly
name: nightly_binary_linux_conda_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
@@ -827,6 +926,8 @@ workflows:
only: nightly
name: nightly_binary_linux_conda_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
@@ -849,6 +950,8 @@ workflows:
only: nightly
name: nightly_binary_linux_conda_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
@@ -871,6 +974,8 @@ workflows:
only: nightly
name: nightly_binary_macos_conda_py3.6
python_version: '3.6'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
@@ -885,6 +990,8 @@ workflows:
only: nightly
name: nightly_binary_macos_conda_py3.7
python_version: '3.7'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
@@ -899,6 +1006,8 @@ workflows:
only: nightly
name: nightly_binary_macos_conda_py3.8
python_version: '3.8'
requires:
- download_third_parties_nix
- binary_conda_upload:
context: org-member
filters:
...
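To see how the pieces fit together (an illustrative sketch, not part of the diff): the download job persists third_party/tmp to the workspace, and each Linux/macOS build or unittest job attaches that workspace at third_party, so the pre-fetched archives are already in place when the build scripts run.

# Inside a build job, after `attach_workspace: at: third_party`, the archives
# fetched by download_third_parties_nix are available locally, for example:
ls third_party/tmp
# lame-3.99.5.tar.gz  flac-1.3.2.tar.xz  libmad-0.15.1b.tar.gz  sox-14.4.2.tar.bz2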
.circleci/config.yml.in

@@ -60,6 +60,34 @@ jobs:
python .circleci/regenerate.py
git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! Run .circleci/regenerate.py to update config"; exit 1)
download_third_parties_nix:
docker:
- image: "centos:8"
resource_class: small
steps:
- checkout
- run:
name: Generate cache key
# This will refresh the cache daily
command: echo "$(date +"%Y-%U-%d")" > .circleci-daily
- restore_cache:
{% raw %}
keys:
- tp-nix-{{ checksum ".circleci-daily" }}-{{ checksum "./build_tools/setup_helpers/build_third_party.sh" }}-{{ checksum "./build_tools/setup_helpers/build_third_party_helper.sh" }}
{% endraw %}
- run:
command: ./build_tools/setup_helpers/build_third_party.sh $PWD --download-only
- save_cache:
{% raw %}
key: tp-nix-{{ checksum ".circleci-daily" }}-{{ checksum "./build_tools/setup_helpers/build_third_party.sh" }}-{{ checksum "./build_tools/setup_helpers/build_third_party_helper.sh" }}
{% endraw %}
paths:
- third_party/tmp
- persist_to_workspace:
root: third_party
paths:
- tmp
binary_linux_wheel:
<<: *binary_common
docker:
@@ -67,6 +95,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run: packaging/build_wheel.sh
- store_artifacts:
path: dist
@@ -82,6 +112,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run: packaging/build_conda.sh
- store_artifacts:
path: /opt/conda/conda-bld/linux-64
@@ -96,6 +128,8 @@ jobs:
xcode: "9.0"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
# Cannot easily deduplicate this as source'ing activate
# will set environment variables which we need to propagate
@@ -118,6 +152,8 @@ jobs:
xcode: "9.0"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
command: |
curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
@@ -322,6 +358,8 @@ jobs:
resource_class: 2xlarge+
steps:
- checkout
- attach_workspace:
at: third_party
- run:
name: Generate cache key
# This will refresh cache on Sundays, nightly build should generate new cache.
@@ -363,6 +401,8 @@ jobs:
image_name: "pytorch/torchaudio_unittest_base:manylinux-cuda10.1"
steps:
- checkout
- attach_workspace:
at: third_party
- run:
name: Generate cache key
# This will refresh cache on Sundays, nightly build should generate new cache.
...
.circleci/regenerate.py

@@ -24,6 +24,7 @@ PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
def build_workflows(prefix='', upload=False, filter_branch=None, indentation=6):
w = []
w += build_download_job(filter_branch)
for btype in ["wheel", "conda"]:
for os_type in ["linux", "macos", "windows"]:
for python_version in PYTHON_VERSIONS:
@@ -32,6 +33,16 @@ def build_workflows(prefix='', upload=False, filter_branch=None, indentation=6):
return indent(indentation, w)
def build_download_job(filter_branch):
job = {
"name": "download_third_parties_nix",
}
if filter_branch:
job["filters"] = gen_filter_branch_tree(filter_branch)
return [{"download_third_parties_nix": job}]
def build_workflow_pair(btype, os_type, python_version, filter_branch, prefix='', upload=False):
w = []
@@ -64,6 +75,9 @@ def generate_base_workflow(base_workflow_name, python_version, filter_branch, os
"python_version": python_version,
}
if os_type in ['linux', 'macos']:
d['requires'] = ['download_third_parties_nix']
if filter_branch:
d["filters"] = gen_filter_branch_tree(filter_branch)
@@ -112,6 +126,7 @@ def indent(indentation, data_list):
def unittest_workflows(indentation=6):
jobs = []
jobs += build_download_job(None)
for os_type in ["linux", "windows"]:
for device_type in ["cpu", "gpu"]:
for i, python_version in enumerate(PYTHON_VERSIONS):
@@ -122,6 +137,10 @@ def unittest_workflows(indentation=6):
if device_type == 'gpu':
job['filters'] = gen_filter_branch_tree('master')
if os_type != "windows":
job['requires'] = ['download_third_parties_nix']
jobs.append({f"unittest_{os_type}_{device_type}": job})
if i == 0 and os_type == "linux" and device_type == "cpu":
...
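For reference, the consistency check at the top of the config implies the following regeneration flow whenever regenerate.py or config.yml.in changes (a sketch; both commands are taken from the circleci_consistency job shown above):

python .circleci/regenerate.py   # re-render .circleci/config.yml from config.yml.in
git diff --exit-code             # a non-empty diff means config.yml is stale and the check fails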
build_tools/setup_helpers/build_third_party.sh

#!/bin/bash
# Build third party libraries (SoX, lame, libmad, and flac)
# Usage: ./build_third_party.sh [prefix] [download_only?=false]
set -e
@@ -10,6 +11,7 @@ prefix="${1:-}"
if [ -z "${prefix}" ]; then
prefix="${root_dir}"
fi
download_only="${2:-false}"
tp_dir="${prefix}/third_party"
tmp_dir="${tp_dir}/tmp"
@@ -20,17 +22,29 @@ mkdir -p "${tmp_dir}" "${build_dir}"
. "${this_dir}/build_third_party_helper.sh"
if ! found_lame "${build_dir}" ; then
get_lame "${tmp_dir}"
if [ "${download_only}" = "false" ]; then
build_lame "${tmp_dir}" "${build_dir}"
fi
fi
if ! found_flac "${build_dir}" ; then
get_flac "${tmp_dir}"
if [ "${download_only}" = "false" ]; then
build_flac "${tmp_dir}" "${build_dir}"
fi
fi
if ! found_mad "${build_dir}" ; then
get_mad "${tmp_dir}"
if [ "${download_only}" = "false" ]; then
build_mad "${tmp_dir}" "${build_dir}"
fi
fi
if ! found_sox "${build_dir}" ; then
get_sox "${tmp_dir}"
if [ "${download_only}" = "false" ]; then
build_sox "${tmp_dir}" "${build_dir}"
fi
fi
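A rough sketch of the two ways the script is now invoked (the download-only call matches the new CircleCI job; a full build simply omits the second argument, which defaults to "false"):

# Download-only mode, used by download_third_parties_nix: fetch archives into third_party/tmp.
./build_tools/setup_helpers/build_third_party.sh "$PWD" --download-only
# Full mode: fetch anything missing and build lame, flac, mad, and sox into the third_party build directory.
./build_tools/setup_helpers/build_third_party.sh "$PWD"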
build_tools/setup_helpers/build_third_party_helper.sh

@@ -57,23 +57,34 @@ found_sox() {
all_found "$1" 'include/sox.h' 'lib/libsox.a'
}

LAME="lame-3.99.5"
LAME_ARCHIVE="${LAME}.tar.gz"

get_lame() {
work_dir="$1"
url="https://downloads.sourceforge.net/project/lame/lame/3.99/${LAME_ARCHIVE}"
(
cd "${work_dir}"
if [ ! -d "${LAME}" ]; then
if [ ! -f "${LAME_ARCHIVE}" ]; then
printf "Fetching liblame from %s\n" "${url}"
curl $CURL_OPTS -O "${url}"
fi
fi
)
}

build_lame() {
work_dir="$1"
install_dir="$2"
(
cd "${work_dir}"
if [ ! -d "${LAME}" ]; then
tar xfp "${LAME_ARCHIVE}"
fi
cd "${LAME}"
# build statically
printf "Building liblame\n"
if [ ! -f Makefile ]; then
./configure ${CONFIG_OPTS} \
--disable-shared --enable-static --prefix="${install_dir}" CFLAGS=-fPIC CXXFLAGS=-fPIC \
@@ -84,23 +95,34 @@ build_lame() {
)
}
FLAC="flac-1.3.2"
FLAC_ARCHIVE="${FLAC}.tar.xz"

get_flac() {
work_dir="$1"
url="https://downloads.sourceforge.net/project/flac/flac-src/${FLAC_ARCHIVE}"
(
cd "${work_dir}"
if [ ! -d "${FLAC}" ]; then
if [ ! -f "${FLAC_ARCHIVE}" ]; then
printf "Fetching flac from %s\n" "${url}"
curl $CURL_OPTS -O "${url}"
fi
fi
)
}

build_flac() {
work_dir="$1"
install_dir="$2"
(
cd "${work_dir}"
if [ ! -d "${FLAC}" ]; then
tar xfp "${FLAC_ARCHIVE}"
fi
cd "${FLAC}"
# build statically
printf "Building flac\n"
if [ ! -f Makefile ]; then
./configure ${CONFIG_OPTS} \
--disable-shared --enable-static --prefix="${install_dir}" CFLAGS=-fPIC CXXFLAGS=-fPIC \
@@ -111,23 +133,34 @@ build_flac() {
)
}
LIBMAD="libmad-0.15.1b"
LIBMAD_ARCHIVE="${LIBMAD}.tar.gz"

get_mad() {
work_dir="$1"
url="https://downloads.sourceforge.net/project/mad/libmad/0.15.1b/${LIBMAD_ARCHIVE}"
(
cd "${work_dir}"
if [ ! -d "${LIBMAD}" ]; then
if [ ! -f "${LIBMAD_ARCHIVE}" ]; then
printf "Fetching mad from %s\n" "${url}"
curl $CURL_OPTS -O "${url}"
fi
fi
)
}

build_mad() {
work_dir="$1"
install_dir="$2"
(
cd "${work_dir}"
if [ ! -d "${LIBMAD}" ]; then
tar xfp "${LIBMAD_ARCHIVE}"
fi
cd "${LIBMAD}"
# build statically
printf "Building mad\n"
if [ ! -f Makefile ]; then
# See https://stackoverflow.com/a/12864879/23845
sed -i.bak 's/-march=i486//' configure
@@ -140,23 +173,34 @@ build_mad() {
)
}
SOX="sox-14.4.2"
SOX_ARCHIVE="${SOX}.tar.bz2"

get_sox() {
work_dir="$1"
url="https://downloads.sourceforge.net/project/sox/sox/14.4.2/${SOX_ARCHIVE}"
(
cd "${work_dir}"
if [ ! -d "${SOX}" ]; then
if [ ! -f "${SOX_ARCHIVE}" ]; then
printf "Fetching SoX from %s\n" "${url}"
curl $CURL_OPTS -O "${url}"
fi
fi
)
}

build_sox() {
work_dir="$1"
install_dir="$2"
(
cd "${work_dir}"
if [ ! -d "${SOX}" ]; then
tar xfp "${SOX_ARCHIVE}"
fi
cd "${SOX}"
# build statically
printf "Building SoX\n"
if [ ! -f Makefile ]; then
# --without-png makes OS X build less hazardous; somehow the build
# finds png and enables it. We don't want it; we'd need to package
...